Dataset columns (string lengths, min-max):
  file_name  3-137
  prefix     0-918k
  suffix     0-962k
  middle     0-812k
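Each row appears to store one source file split into fill-in-the-middle (FIM) fields, so the original file reads back as prefix + middle + suffix. A minimal reassembly sketch, assuming the rows are loaded with the Hugging Face `datasets` library (the dataset path below is hypothetical):

import datasets

# Hypothetical path; substitute the real dataset identifier.
ds = datasets.load_dataset("org/fim-code-corpus", split="train")

row = ds[0]
# Reassemble the original file from the three FIM fields.
source = row["prefix"] + row["middle"] + row["suffix"]
print(row["file_name"], len(source))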
npmjs_static_61.py
import re
import pickle
import logging
import networkx
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime, timedelta

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2019, 1, 1),
    'email': ['[email protected]'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

# don't auto-schedule the dag
# https://airflow.readthedocs.io/en/stable/scheduler.html
dag = DAG('npmjs_static_61', default_args=default_args, schedule_interval=None)
# periodically run the dag
# dag = DAG('tutorial', default_args=default_args, schedule_interval=timedelta(days=1))

# load dep_tree for packages, relative to AIRFLOW_HOME
npmjs_dep_path = "./dags/npmjs.with_stats.dep_graph_61.pickle"
dep_tree = pickle.load(open(npmjs_dep_path, "rb"))
logging.info("loaded dep_tree with %d nodes", dep_tree.number_of_nodes())


def get_sanitized_pkgname(pkg_name):
    invalid_name = re.compile(r'[^a-zA-Z0-9_.-]')
    pkg_name = re.sub(invalid_name, '..', pkg_name)
    return pkg_name


def get_bash_op(pkg_name, dag, configpath='/home/maloss/config/astgen_javascript_smt.config',
                cache_dir='/home/maloss/metadata', outdir='/home/maloss/result'):
    return BashOperator(
        task_id=get_sanitized_pkgname(pkg_name=pkg_name),
        execution_timeout=timedelta(hours=2),
        bash_command='cd /home/maloss/src/ && python main.py astfilter --ignore_dep_version -n %s -c %s -d %s -o %s -l javascript'
                     % (pkg_name, configpath, cache_dir, outdir),
        dag=dag)


# all analysis jobs
# get all leaves
# https://networkx.github.io/documentation/latest/reference/algorithms/generated/networkx.algorithms.simple_paths.all_simple_paths.html
# leaves = (v for v, d in dep_tree.out_degree() if d == 0)
pkg2op = {}
for pkg in dep_tree.nodes():
    pkg = str(pkg)
    dep_pkgs = list(dep_tree.successors(pkg))
    logging.debug("%s has %d dep_pkgs", pkg, len(dep_pkgs))
    if not get_sanitized_pkgname(pkg_name=pkg):
        continue
    if pkg not in pkg2op:
        pkg2op[pkg] = get_bash_op(pkg_name=pkg, dag=dag)
    else:
        continue
    pkg_task = pkg2op[pkg]
    dep_tasks = set()
    for dep_pkg in dep_pkgs:
        dep_pkg = str(dep_pkg)
        # avoid cycles
        if dep_pkg == pkg or not get_sanitized_pkgname(pkg_name=dep_pkg):
            continue
        if dep_pkg not in pkg2op:
            pkg2op[dep_pkg] = get_bash_op(pkg_name=dep_pkg, dag=dag)
        dep_tasks.add(pkg2op[dep_pkg])
    # default trigger rule is all_success
    # use all_done instead
    pkg_task << list(dep_tasks)
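For reference, the sanitizer above replaces every character outside [a-zA-Z0-9_.-] with '..', presumably to keep task IDs within the character set Airflow accepts; scoped npm names are the usual offenders. A quick illustration:

# '@' and '/' are each replaced by '..'
print(get_sanitized_pkgname('@scope/pkg'))  # -> '..scope..pkg'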
str.rs
//! Unicode string slices.

use crate::alloc::AllocError;
use crate::sealed::Sealed;
use crate::string::String;
use crate::vec::Vec;
use core::unicode::conversions;

/// String slice extension.
pub trait StrExt: Sealed {
    /// Returns the uppercase equivalent of this string slice, as a new [`String`].
    fn try_to_uppercase(&self) -> Result<String, AllocError>;

    /// Returns the lowercase equivalent of this string slice, as a new [`String`].
    fn try_to_lowercase(&self) -> Result<String, AllocError>;

    /// Returns a copy of this string where each character is mapped to its
    /// ASCII upper case equivalent.
    fn try_to_ascii_uppercase(&self) -> Result<String, AllocError>;

    /// Returns a copy of this string where each character is mapped to its
    /// ASCII lower case equivalent.
    fn try_to_ascii_lowercase(&self) -> Result<String, AllocError>;
}

impl Sealed for str {}

impl StrExt for str {
    #[inline]
    fn try_to_uppercase(&self) -> Result<String, AllocError> {
        let mut s = String::try_with_capacity(self.len())?;
        for c in self[..].chars() {
            match conversions::to_upper(c) {
                [a, '\0', _] => s.push(a),
                [a, b, '\0'] => {
                    s.push(a);
                    s.push(b);
                }
                [a, b, c] => {
                    s.push(a);
                    s.push(b);
                    s.push(c);
                }
            }
        }
        Ok(s)
    }

    #[inline]
    fn try_to_lowercase(&self) -> Result<String, AllocError> {
        let mut s = String::try_with_capacity(self.len())?;
        for (i, c) in self[..].char_indices() {
            if c == 'Σ' {
                // Σ maps to σ, except at the end of a word where it maps to ς.
                // This is the only conditional (contextual) but language-independent mapping
                // in `SpecialCasing.txt`,
                // so hard-code it rather than have a generic "condition" mechanism.
                // See https://github.com/rust-lang/rust/issues/26035
                map_uppercase_sigma(self, i, &mut s)?;
            } else {
                match conversions::to_lower(c) {
                    [a, '\0', _] => s.push(a),
                    [a, b, '\0'] => {
                        s.push(a);
                        s.push(b);
                    }
                    [a, b, c] => {
                        s.push(a);
                        s.push(b);
                        s.push(c);
                    }
                }
            }
        }
        return Ok(s);

        fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) -> Result<(), AllocError> {
            // See https://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
            // for the definition of `Final_Sigma`.
            debug_assert!('Σ'.len_utf8() == 2);
            let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev())
                && !case_ignoreable_then_cased(from[i + 2..].chars());
            to.try_push_str(if is_word_final { "ς" } else { "σ" })
        }

        #[allow(clippy::skip_while_next)]
        fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
            use core::unicode::{Case_Ignorable, Cased};
            match iter.skip_while(|&c| Case_Ignorable(c)).next() {
                Some(c) => Cased(c),
                None => false,
            }
        }
    }

    #[inline]
    fn try_to_ascii_uppercase(&self) -> Result<String, AllocError> {
        let mut bytes = Vec::new();
        bytes.try_copy_from_slice(self.as_bytes())?;
        bytes.make_ascii_uppercase();
        // make_ascii_uppercase() preserves the UTF-8 invariant.
        Ok(unsafe { String::from_utf8_unchecked(bytes) })
    }

    #[inline]
    fn try_to_ascii_lowercase(&self) -> Result<String, AllocError> {
        let mut bytes = Vec::new();
        bytes.try_copy_from_slice(self.as_bytes())?;
        bytes.make_ascii_lowercase();
        // make_ascii_lowercase() preserves the UTF-8 invariant.
        Ok(unsafe { String::from_utf8_unchecked(bytes) })
    }
}
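The hard-coded Final_Sigma rule above is the same contextual mapping CPython applies in str.lower(), so a quick Python check can cross-validate the expected behavior (a sketch, independent of this crate's API):

# Σ lowercases to ς only at the end of a word, σ elsewhere.
assert "ΣΣ".lower() == "σς"        # non-final Σ, then word-final Σ
assert "ΟΔΟΣ".lower()[-1] == "ς"   # word-final sigma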
JoinTables.js
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { Component } from 'react';
import Immutable from 'immutable';
import Radium from 'radium';
import { connect } from 'react-redux';
import pureRender from 'pure-render-decorator';
import PropTypes from 'prop-types';

import EllipsedText from 'components/EllipsedText';
import { formLabel, formDescription } from 'uiTheme/radium/typography';
import { getJoinTable, getExploreState } from 'selectors/explore';
import { getViewState } from 'selectors/resources';
import Spinner from 'components/Spinner';
import { constructFullPath } from 'utils/pathUtils';
import { JOIN_TABLE_VIEW_ID } from 'components/Wizards/JoinWizard/JoinController';
import { accessEntity } from 'actions/resources/lru';
import { CUSTOM_JOIN } from 'constants/explorePage/joinTabs';
import { ExploreInfoHeader } from '../ExploreInfoHeader';
import ExploreTableController from './ExploreTableController';

const WIDTH_SCALE = 0.495;

@Radium
@pureRender
export class JoinTables extends Component {
  static propTypes = {
    pageType: PropTypes.string.isRequired,
    dataset: PropTypes.instanceOf(Immutable.Map),
    joinTableData: PropTypes.object,
    exploreViewState: PropTypes.instanceOf(Immutable.Map),
    joinViewState: PropTypes.instanceOf(Immutable.Map),
    location: PropTypes.object,
    sqlSize: PropTypes.number,
    rightTreeVisible: PropTypes.bool,
    accessEntity: PropTypes.func.isRequired,
    joinDatasetFullPath: PropTypes.string,
    joinVersion: PropTypes.string,
    joinTab: PropTypes.string,
    joinStep: PropTypes.number
  };

  componentWillReceiveProps(nextProps) {
    // Because we have two tables visible, it's possible to evict the left table by looking
    // at enough different right tables. So access the left tableData every time the right
    // tableData changes.
    if (nextProps.joinVersion !== this.props.joinVersion) {
      nextProps.accessEntity('tableData', this.props.dataset.get('datasetVersion'));
    }
  }

  shouldRenderSecondTable() {
    const isOnCustomJoinSelectStep = this.props.joinTab === CUSTOM_JOIN && this.props.joinStep !== 2;
    return isOnCustomJoinSelectStep && !this.props.exploreViewState.get('isInProgress');
  }

  renderSecondTable() {
    if (!this.shouldRenderSecondTable()) {
      return null;
    }
    const { joinTableData, joinViewState, rightTreeVisible, sqlSize, location } = this.props;
    if ((!joinTableData || !joinTableData.get('columns').size) && !joinViewState.get('isFailed')) {
      const customTableContent = joinViewState && joinViewState.get('isInProgress')
        ? (
          <Spinner style={styles.emptyTable} iconStyle={{color: 'gray'}} />
        )
        : (
          <div style={[styles.emptyTable, formDescription]}>
            {la('Please add one or more join conditions to preview matching records.')}
          </div>
        );
      return customTableContent;
    }
    return (
      <ExploreTableController
        isDumbTable
        pageType={this.props.pageType}
        dataset={this.props.dataset}
        exploreViewState={joinViewState}
        tableData={joinTableData}
        widthScale={WIDTH_SCALE}
        dragType='join'
        location={location}
        sqlSize={sqlSize}
        rightTreeVisible={rightTreeVisible}
      />
    );
  }

  render() {
    const { location, sqlSize, rightTreeVisible, dataset, exploreViewState } = this.props;
    const nameForDisplay = ExploreInfoHeader.getNameForDisplay(dataset);
    const scale = this.shouldRenderSecondTable() ? WIDTH_SCALE : 1;
    const tableBlockStyle = !this.shouldRenderSecondTable()
      ? { maxWidth: '100%', width: 'calc(100% - 10px)' }
      : {};
    return (
      <div style={[styles.base]}>
        <div style={[styles.tableBlock, tableBlockStyle]}>
          <div style={styles.header}><EllipsedText text={nameForDisplay} /></div>
          <ExploreTableController
            pageType={this.props.pageType}
            dataset={this.props.dataset}
            dragType='groupBy'
            widthScale={scale}
            location={location}
            sqlSize={sqlSize}
            rightTreeVisible={rightTreeVisible}
            exploreViewState={exploreViewState}
          />
        </div>
        <div style={[styles.tableBlock, { marginLeft: 5 }, !this.shouldRenderSecondTable() ? { display: 'none' } : {}]}>
          <div style={[styles.header, !this.props.joinDatasetFullPath && {fontStyle: 'italic'}]}>
            <EllipsedText text={this.props.joinDatasetFullPath || la('No table selected.')}/>
          </div>
          {this.renderSecondTable()}
        </div>
      </div>
    );
  }
}

const mapStateToProps = (state, ownProps) => {
  const explorePageState = getExploreState(state);
  return {
    joinTableData: getJoinTable(state, ownProps),
    joinViewState: getViewState(state, JOIN_TABLE_VIEW_ID),
    joinDatasetFullPath: constructFullPath(explorePageState.join.getIn(['custom', 'joinDatasetPathList'])),
    joinTab: explorePageState.join.get('joinTab'),
    joinVersion: explorePageState.join.getIn(['custom', 'joinVersion']),
    joinStep: explorePageState.join.get('step')
  };
};

export default connect(mapStateToProps, { accessEntity })(JoinTables);

const styles = {
  base: {
    display: 'flex',
    flexGrow: 1,
    backgroundColor: '#F5FCFF'
  },
  tableBlock: {
    display: 'flex',
    maxWidth: 'calc(50% - 10px)',
    minWidth: 'calc(50% - 10px)',
    flexDirection: 'column',
    position: 'relative'
  },
  header: {
    ...formLabel,
    height: 25,
    backgroundColor: '#F3F3F3',
    zIndex: 1,
    display: 'flex',
    alignItems: 'center',
    padding: '0 10px',
    marginRight: -5, // hack for alignment
    flexGrow: 0
  },
  emptyTable: {
    display: 'flex',
    justifyContent: 'center',
    alignItems: 'center',
    minWidth: '100%',
    minHeight: 300,
    backgroundColor: '#F3F3F3',
    border: '1px dotted gray',
    zIndex: 1,
    flexGrow: 1
  },
  spinner: {
    width: 'calc(50% - 26px)',
    height: 'calc(100% - 440px)',
    backgroundColor: '#F3F3F3',
    border: '1px dotted gray',
    alignItems: 'center',
    zIndex: 1,
    flexGrow: 1
  }
};
cluster1.py
from __future__ import print_function
import pyslurm


def main():
    try:
        a = pyslurm.job()
        jobs = a.get()
        print(jobs)
    except ValueError as e:
        print("Job list error - {0}".format(e.args[0]))


if __name__ == "__main__":
    main()
cert_authority_test.go
/*
Copyright 2018 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package cert_test

import (
    "io/ioutil"
    "os"
    "path"
    "testing"

    "github.com/openshift/cluster-api/pkg/cert"
)

var (
    defaultCertMaterial = "this is the cert contents"
    defaultKeyMaterial  = "this is the key contents"
)

func TestEmptyPath(t *testing.T) {
    _, err := cert.Load("")
    if err == nil {
        t.Errorf("expected error, got nil")
    }
}

func TestInvalidPath(t *testing.T) {
    _, err := cert.Load("/my/invalid/path")
    if err == nil {
        t.Errorf("expected error, got nil")
    }
}

func TestDirWithMissingKey(t *testing.T) {
    dir := newTempDir(t)
    defer os.RemoveAll(dir)
    newCaDirectory(t, dir, &defaultCertMaterial, nil)
    _, err := cert.Load(dir)
    if err == nil {
        t.Errorf("expected error, got nil")
    }
}

func TestDirWithMissingCert(t *testing.T) {
    dir := newTempDir(t)
    defer os.RemoveAll(dir)
    newCaDirectory(t, dir, nil, &defaultKeyMaterial)
    _, err := cert.Load(dir)
    if err == nil {
        t.Errorf("expected error, got nil")
    }
}

func TestDirHappyPath(t *testing.T) {
    dir := newTempDir(t)
    defer os.RemoveAll(dir)
    newCaDirectory(t, dir, &defaultCertMaterial, &defaultKeyMaterial)
    ca, err := cert.Load(dir)
    if err != nil {
        t.Errorf("unexpected error: %v", err)
    }
    actualCertMaterial := string(ca.Certificate)
    if actualCertMaterial != defaultCertMaterial {
        t.Errorf("expected '%v' got '%v'", defaultCertMaterial, actualCertMaterial)
    }
    actualKeyMaterial := string(ca.PrivateKey)
    if actualKeyMaterial != defaultKeyMaterial {
        t.Errorf("expected '%v' got '%v'", defaultKeyMaterial, actualKeyMaterial)
    }
}

func TestCertPath(t *testing.T) {
    dir := newTempDir(t)
    defer os.RemoveAll(dir)
    certPath, _ := newCaDirectory(t, dir, &defaultCertMaterial, &defaultKeyMaterial)
    ca, err := cert.Load(certPath)
    if err != nil {
        t.Errorf("unexpected error: %v", err)
    }
    actualCertMaterial := string(ca.Certificate)
    if actualCertMaterial != defaultCertMaterial {
        t.Errorf("expected '%v' got '%v'", defaultCertMaterial, actualCertMaterial)
    }
    actualKeyMaterial := string(ca.PrivateKey)
    if actualKeyMaterial != defaultKeyMaterial {
        t.Errorf("expected '%v' got '%v'", defaultKeyMaterial, actualKeyMaterial)
    }
}

func TestKeyPath(t *testing.T) {
    dir := newTempDir(t)
    defer os.RemoveAll(dir)
    _, keyPath := newCaDirectory(t, dir, &defaultCertMaterial, &defaultKeyMaterial)
    ca, err := cert.Load(keyPath)
    if err != nil {
        t.Errorf("unexpected error: %v", err)
    }
    actualCertMaterial := string(ca.Certificate)
    if actualCertMaterial != defaultCertMaterial {
        t.Errorf("expected '%v' got '%v'", defaultCertMaterial, actualCertMaterial)
    }
    actualKeyMaterial := string(ca.PrivateKey)
    if actualKeyMaterial != defaultKeyMaterial {
        t.Errorf("expected '%v' got '%v'", defaultKeyMaterial, actualKeyMaterial)
    }
}

func newCaDirectory(t *testing.T, dir string, certMaterial *string, keyMaterial *string) (certPath string, keyPath string) {
    certPath, keyPath = getCertAndKeyPaths(dir)
    if certMaterial != nil {
        err := ioutil.WriteFile(certPath, []byte(*certMaterial), 0644)
        if err != nil {
            t.Errorf("unable to write cert material to %v, got %v", certPath, err)
        }
    }
    if keyMaterial != nil {
        err := ioutil.WriteFile(keyPath, []byte(*keyMaterial), 0644)
        if err != nil {
            t.Errorf("unable to write key material to %v, got %v", keyPath, err)
        }
    }
    return
}

func getCertAndKeyPaths(dir string) (certPath string, keyPath string) {
    return path.Join(dir, "ca.crt"), path.Join(dir, "ca.key")
}

func newTempDir(t *testing.T) string {
    dir, err := ioutil.TempDir("", "")
    if err != nil {
        t.Errorf("unable to create temp dir: %v", err)
    }
    return dir
}
emu.py
""" """ from campy.cameras import unicam import os import time import logging import sys import numpy as np from collections import deque import csv import imageio def LoadSystem(params): return params["cameraMake"] def GetDeviceList(system): return system def LoadDevice(cam_params): return cam_params["device"] def GetSerialNumber(device): return device def GetModelName(camera): return "Emulated_Camera" def OpenCamera(cam_params, device): # Open video reader for emulation videoFileName = cam_params["videoFilename"][3 : len(cam_params["videoFilename"])] full_file_name = os.path.join( cam_params["videoFolder"], cam_params["cameraName"], videoFileName ) camera = imageio.get_reader(full_file_name) # Set features manually or automatically, depending on configuration frame_size = camera.get_meta_data()["size"] cam_params["frameWidth"] = frame_size[0] cam_params["frameHeight"] = frame_size[1] print("Opened {} emulation.".format(cam_params["cameraName"])) return camera, cam_params def LoadSettings(cam_params, camera): return cam_params def StartGrabbing(camera): return True def GrabFrame(camera, frameNumber): return camera.get_data(frameNumber) def GetImageArray(grabResult): return grabResult def GetTimeStamp(grabResult): return time.perf_counter() def DisplayImage(cam_params, dispQueue, grabResult): # Downsample image img = grabResult[ :: cam_params["displayDownsample"], :: cam_params["displayDownsample"], : ] # Send to display queue dispQueue.append(img) def ReleaseFrame(grabResult): del grabResult def
(cam_params, camera): print("Closing {}... Please wait.".format(cam_params["cameraName"])) # Close camera after acquisition stops del camera def CloseSystem(system, device_list): del system del device_list
CloseCamera
white_black_listing_test.go
// Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package policy

import (
    "net/http"
    "testing"

    "istio.io/istio/pkg/test/framework/label"

    "istio.io/istio/pkg/test/framework"
    "istio.io/istio/pkg/test/framework/components/bookinfo"
    util "istio.io/istio/tests/integration/mixer"
)

func TestWhiteListing(t *testing.T) {
    framework.NewTest(t).Label(label.Flaky).Run(func(ctx framework.TestContext) {
        // Verify you can access productpage right now.
        util.SendTrafficAndWaitForExpectedStatus(ing, t, "Sending traffic...", "", 2, http.StatusOK)

        ctx.ApplyConfigOrFail(
            t,
            bookinfoNs.Name(),
            bookinfo.PolicyDenyIPRule.LoadWithNamespaceOrFail(t, bookinfoNs.Name()))
        defer ctx.DeleteConfigOrFail(
            t,
            bookinfoNs.Name(),
            bookinfo.PolicyDenyIPRule.LoadWithNamespaceOrFail(t, bookinfoNs.Name()))
        util.AllowRuleSync(t)

        // Verify you can't access productpage now.
        util.SendTrafficAndWaitForExpectedStatus(ing, t, "Sending traffic...", "", 30, http.StatusForbidden)
    })
}
quorum-key-manager.go
package quorumkeymanager

import (
    "context"
    "fmt"
    "net/http"
    "os"
    "path"
    "strconv"
    "time"

    qkm "github.com/consensys/orchestrate/src/infra/quorum-key-manager/consensys"

    httputils "github.com/consensys/orchestrate/pkg/toolkit/app/http"
    "github.com/docker/docker/api/types/mount"
    log "github.com/sirupsen/logrus"

    dockercontainer "github.com/docker/docker/api/types/container"
    "github.com/docker/docker/api/types/network"
    "github.com/docker/go-connections/nat"
)

const defaultQKMImage = "consensys/quorum-key-manager:v21.9.2"
const defaultHostPort = "8080"
const defaultHost = "localhost"

type QuorumKeyManager struct{}

type Config struct {
    Image             string
    Port              string
    MetricsPort       string
    Host              string
    DBPort            string
    DBHost            string
    ManifestDirectory string
}

func NewDefault() *Config {
    return &Config{
        Image:  defaultQKMImage,
        Port:   defaultHostPort,
        Host:   defaultHost,
        DBHost: defaultDBHost,
        DBPort: defaultDBPort,
    }
}

func (cfg *Config) SetHostPort(port string) *Config {
    cfg.Port = port
    metricsPort, _ := strconv.ParseInt(port, 10, 64)
    cfg.MetricsPort = fmt.Sprintf("%d", metricsPort+1)
    return cfg
}

func (cfg *Config) SetHost(host string) *Config {
    if host != "" {
        cfg.Host = host
    }
    return cfg
}

func (cfg *Config) SetDBPort(port string) *Config {
    cfg.DBPort = port
    return cfg
}

func (cfg *Config) SetDBHost(host string) *Config {
    cfg.DBHost = host
    return cfg
}

func (cfg *Config) SetManifestDirectory(dir string) *Config {
    cfg.ManifestDirectory = dir
    return cfg
}

func (cfg *Config) CreateManifest(filename string, mnf *qkm.Manifest) error {
    filepath := path.Join(cfg.ManifestDirectory, filename)
    out, err := os.Create(path.Join(cfg.ManifestDirectory, filename))
    if err != nil {
        return err
    }
    defer out.Close()

    err = os.Chmod(filepath, 0777)
    if err != nil {
        return err
    }

    mnfBody, err := mnf.MarshallToYaml()
    if err != nil {
        return err
    }

    _, err = out.Write(mnfBody)
    return err
}

func (q *QuorumKeyManager) GenerateContainerConfig(_ context.Context, configuration interface{}) (*dockercontainer.Config, *dockercontainer.HostConfig, *network.NetworkingConfig, error) {
    cfg, ok := configuration.(*Config)
    if !ok {
        return nil, nil, nil, fmt.Errorf("invalid configuration type (expected %T but got %T)", cfg, configuration)
    }

    containerCfg := &dockercontainer.Config{
        Image: cfg.Image,
        Env: []string{
            fmt.Sprintf("DB_PORT=%v", cfg.DBPort),
            fmt.Sprintf("DB_HOST=%v", cfg.DBHost),
        },
        ExposedPorts: nat.PortSet{
            "8080/tcp": struct{}{},
            "8081/tcp": struct{}{},
        },
        Cmd: []string{"run", "--manifest-path=/manifests", "--http-host=0.0.0.0", "--log-level=debug"},
    }

    hostConfig := &dockercontainer.HostConfig{
        RestartPolicy: dockercontainer.RestartPolicy{
            Name: "always",
        },
        Mounts: []mount.Mount{
            {
                Type:   mount.TypeBind,
                Source: cfg.ManifestDirectory,
                Target: "/manifests",
            },
        },
    }
    if cfg.Port != "" {
        hostConfig.PortBindings = nat.PortMap{
            "8080/tcp": []nat.PortBinding{{HostIP: "0.0.0.0", HostPort: cfg.Port}},
            "8081/tcp": []nat.PortBinding{{HostIP: "0.0.0.0", HostPort: cfg.MetricsPort}},
        }
    }

    return containerCfg, hostConfig, nil, nil
}

func (q *QuorumKeyManager) WaitForService(ctx context.Context, configuration interface{}, timeout time.Duration) error {
    cfg, ok := configuration.(*Config)
    if !ok {
        return fmt.Errorf("invalid configuration type (expected %T but got %T)", cfg, configuration)
    }

    rctx, cancel := context.WithTimeout(ctx, timeout)
    defer cancel()

    retryT := time.NewTicker(2 * time.Second)
    defer retryT.Stop()

    httpClient := httputils.NewClient(httputils.NewDefaultConfig())

    var cerr error
waitForServiceLoop:
    for {
        select {
        case <-rctx.Done():
            cerr = rctx.Err()
            break waitForServiceLoop
        case <-retryT.C:
            resp, err := httpClient.Get(fmt.Sprintf("http://%v:%v/ready", cfg.Host, cfg.MetricsPort))

            switch {
            case err != nil:
                log.WithContext(rctx).WithError(err).Warnf("waiting for quorum-key-manager service to start")
            case resp.StatusCode != http.StatusOK:
                log.WithContext(rctx).WithField("status_code", resp.StatusCode).Warnf("waiting for quorum-key-manager service to be ready")
            default:
                log.WithContext(rctx).Info("quorum-key-manager container service is ready")
                break waitForServiceLoop
            }
        }
    }

    return cerr
}
versioncomparator.py
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pkg_resources import parse_version


def cmp_versions(version1, version2):
    if version1 is None:
        if version2 is None:
            return 0
        else:
            return -1
    if version2 is None:
        return 1
    if parse_version(version1) == parse_version(version2):
        return 0
    elif parse_version(version1) > parse_version(version2):
        return 1
    return -1
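A few illustrative calls (a sketch; pkg_resources.parse_version compares release segments numerically, so "1.10" sorts above "1.2"):

assert cmp_versions(None, None) == 0
assert cmp_versions(None, "1.0") == -1
assert cmp_versions("1.0", None) == 1
assert cmp_versions("1.2", "1.10") == -1
assert cmp_versions("2.0", "2.0") == 0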
test_async_job.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.utils import cleanup_resources
from marvin.lib.base import ServiceOffering, DiskOffering, Account, VirtualMachine,\
    queryAsyncJobResult, PASS
from marvin.lib.common import get_domain, get_zone, get_test_template
from pytz import timezone


class TestAsyncJob(cloudstackTestCase):
    """
    Test queryAsyncJobResult
    """

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestAsyncJob, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.testdata = cls.testClient.getParsedTestDataConfig()

        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.hypervisor = cls.testClient.getHypervisorInfo()

        cls.template = get_test_template(
            cls.api_client,
            cls.zone.id,
            cls.hypervisor
        )

        # Create service, disk offerings etc
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.testdata["service_offering"]
        )
        cls.disk_offering = DiskOffering.create(
            cls.api_client,
            cls.testdata["disk_offering"]
        )

        cls._cleanup = [
            cls.service_offering,
            cls.disk_offering
        ]

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as exception:
            raise Exception("Warning: Exception during cleanup : %s" % exception)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.hypervisor = self.testClient.getHypervisorInfo()
        self.testdata["virtual_machine"]["zoneid"] = self.zone.id
        self.testdata["virtual_machine"]["template"] = self.template.id
        self.testdata["iso"]["zoneid"] = self.zone.id
        self.account = Account.create(
            self.apiclient,
            self.testdata["account"],
            domainid=self.domain.id
        )
        self.cleanup = [self.account]

    def tearDown(self):
        try:
            self.debug("Cleaning up the resources")
            cleanup_resources(self.apiclient, self.cleanup)
            self.debug("Cleanup complete!")
        except Exception as exception:
            self.debug("Warning! Exception in tearDown: %s" % exception)

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"],
          required_hardware="false")
    def test_query_async_job_result(self):
        """
        Test queryAsyncJobResult API for expected values
        """
        self.debug("Deploying instance in the account: %s" % self.account.name)
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.testdata["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            hypervisor=self.hypervisor
        )
        response = virtual_machine.getState(
            self.apiclient,
            VirtualMachine.RUNNING)
        self.assertEqual(response[0], PASS, response[1])

        cmd = queryAsyncJobResult.queryAsyncJobResultCmd()
        cmd.jobid = virtual_machine.jobid
        cmd_response = self.apiclient.queryAsyncJobResult(cmd)

        db_result = self.dbclient.execute(
            "select * from async_job where uuid='%s'" % virtual_machine.jobid)

        # verify that 'completed' value from api equals 'removed' db column value
        completed = cmd_response.completed
        removed = timezone('UTC').localize(db_result[0][17])
        removed = removed.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertEqual(completed, removed,
                         "Expected 'completed' timestamp value %s to be equal to "
                         "'removed' db column value %s." % (completed, removed))

        # verify that api job_status value equals db job_status value
        jobstatus_db = db_result[0][8]
        jobstatus_api = cmd_response.jobstatus
        self.assertEqual(jobstatus_api, jobstatus_db,
                         "Expected 'jobstatus' api value %s to be equal to "
                         "'job_status' db column value %s." % (jobstatus_api, jobstatus_db))
test_rbfopt_degree0_models.py
"""Test the successful creation of Pyomo 0-degree models in RBFOpt. This module contains unit tests for the module rbfopt_degree0_models. Licensed under Revised BSD license, see LICENSE. (C) Copyright International Business Machines Corporation 2016. """ from __future__ import print_function from __future__ import division from __future__ import absolute_import import unittest import numpy as np import pyomo.environ import rbfopt import rbfopt.rbfopt_utils as ru import rbfopt.rbfopt_degree0_models as d0 from rbfopt.rbfopt_settings import RbfoptSettings class TestMultiquadricModels(unittest.TestCase): """Test the rbfopt_degree0_models module using multiquadric RBF.""" def setUp(self): """Generate data to simulate an optimization problem.""" np.random.seed(71294123) self.settings = RbfoptSettings(rbf = 'multiquadric') self.n = 3 self.k = 5 self.var_lower = np.array([i for i in range(self.n)]) self.var_upper = np.array([i + 10 for i in range(self.n)]) self.node_pos = np.array([self.var_lower, self.var_upper, [1, 2, 3], [9, 5, 8.8], [5.5, 7, 12]]) self.node_val = np.array([2*i for i in range(self.k)]) Amat = [[1.0, 17.349351572897476, 1.9999999999999998, 12.009995836801943, 12.932517156377562, 1.0], [17.349351572897476, 1.0, 15.620499351813308, 6.945502141674135, 6.103277807866851, 1.0], [1.9999999999999998, 15.620499351813308, 1.0, 10.374969879474351, 11.280514172678478, 1.0], [12.009995836801943, 6.945502141674135, 10.374969879474351, 1.0, 5.243090691567331, 1.0], [12.932517156377562, 6.103277807866851, 11.280514172678478, 5.243090691567331, 1.0, 1.0],
[1.0, 1.0, 1.0, 1.0, 1.0, 0.0]] self.Amat = np.matrix(Amat) self.Amatinv = self.Amat.getI() self.rbf_lambda = np.array([1.981366489986409, 0.6262004309283905, -1.8477896263093248, -0.10028069928913483, -0.65949659531634]) self.rbf_h = np.array([0.5833631458309435]) self.integer_vars = np.array([1]) # -- end function def test_create_min_rbf_model(self): """Test the create_min_rbf_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_min_rbf_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h) self.assertIsInstance(model, pyomo.environ.ConcreteModel) model = d0.create_min_rbf_model( self.settings, 10, 20, np.array([0] * 10),np.array([1] * 10), np.array([i for i in range(10)]), (np.array([0]), np.array([]), [(0, 0, np.array([i for i in range(10)]))]), np.random.randint(0, 2, size=(20, 10)), np.random.uniform(size=20), np.array([-1])) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_max_one_over_mu_model(self): """Test the create_max_one_over_mu_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_max_one_over_mu_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.Amat) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_max_h_k_model(self): """Test the create_max_h_k_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_max_h_k_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h, self.Amat, -1) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_min_bump_model(self): """Test the create_min_bump_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ Phimat = self.Amat[:self.k, :self.k] Pmat = self.Amat[:self.k, self.k:] node_err_bounds = np.array([[- 2, + 2] for i in range(self.k)]) model = d0.create_min_bump_model(self.settings, self.n, self.k, Phimat, Pmat, self.node_val, node_err_bounds) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_maximin_dist_model(self): """Test the create_maximin_dist_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_maximin_dist_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_min_msrsm_model(self): """Test the create_min_msrsm_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. 
""" model = d0.create_min_msrsm_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h, 0.5, 0.0, 1.0, min(self.node_val), max(self.node_val)) self.assertIsInstance(model, pyomo.environ.ConcreteModel) # -- end class class TestLinearModels(unittest.TestCase): """Test the rbfopt_degree0_models module using linear RBF.""" def setUp(self): """Generate data to simulate an optimization problem.""" self.settings = RbfoptSettings(rbf = 'linear') self.n = 3 self.k = 5 self.var_lower = np.array([i for i in range(self.n)]) self.var_upper = np.array([i + 10 for i in range(self.n)]) self.node_pos = np.array([self.var_lower, self.var_upper, [1, 2, 3], [9, 5, 8.8], [5.5, 7, 12]]) self.node_val = np.array([2*i for i in range(self.k)]) Amat = [[0.0, 17.320508075688775, 1.7320508075688772, 11.968291440301744, 12.893796958227627, 1.0], [17.320508075688775, 0.0, 15.588457268119896, 6.873136110975833, 6.020797289396148, 1.0], [1.7320508075688772, 15.588457268119896, 0.0, 10.32666451474047, 11.236102527122116, 1.0], [11.968291440301744, 6.873136110975833, 10.32666451474047, 0.0, 5.146843692983108, 1.0], [12.893796958227627, 6.020797289396148, 11.236102527122116, 5.146843692983108, 0.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 0.0]] self.Amat = np.matrix(Amat) self.Amatinv = self.Amat.getI() self.rbf_lambda = np.array([1.1704846814048488, 0.5281643269521171, -0.9920149389974761, -0.1328847504999134, -0.5737493188595765]) self.rbf_h = np.array([1.5583564301976252]) self.integer_vars = np.array([1]) # -- end function def test_create_min_rbf_model(self): """Test the create_min_rbf_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_min_rbf_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h) self.assertIsInstance(model, pyomo.environ.ConcreteModel) model = d0.create_min_rbf_model( self.settings, 10, 20, np.array([0] * 10),np.array([1] * 10), np.array([i for i in range(10)]), (np.array([0]), np.array([]), [(0, 0, np.array([i for i in range(10)]))]), np.random.randint(0, 2, size=(20, 10)), np.random.uniform(size=20), np.array([-1])) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_max_one_over_mu_model(self): """Test the create_max_one_over_mu_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_max_one_over_mu_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.Amat) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_max_h_k_model(self): """Test the create_max_h_k_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_max_h_k_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h, self.Amat, -1) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_min_bump_model(self): """Test the create_min_bump_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. 
""" Phimat = self.Amat[:self.k, :self.k] Pmat = self.Amat[:self.k, self.k:] node_err_bounds = np.array([[- 2, + 2] for i in range(self.k)]) model = d0.create_min_bump_model(self.settings, self.n, self.k, Phimat, Pmat, self.node_val, node_err_bounds) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_maximin_dist_model(self): """Test the create_maximin_dist_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_maximin_dist_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos) self.assertIsInstance(model, pyomo.environ.ConcreteModel) def test_create_min_msrsm_model(self): """Test the create_min_msrsm_model function. This test simply checks whether the function returns a valid pyomo.ConcreteModel object. """ model = d0.create_min_msrsm_model(self.settings, self.n, self.k, self.var_lower, self.var_upper, self.integer_vars, None, self.node_pos, self.rbf_lambda, self.rbf_h, 0.5, 0.0, 1.0, min(self.node_val), max(self.node_val)) self.assertIsInstance(model, pyomo.environ.ConcreteModel) # -- end class
DescribePredictorBacktestExportJobCommand.ts
import { ForecastClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ForecastClient";
import {
  DescribePredictorBacktestExportJobRequest,
  DescribePredictorBacktestExportJobResponse,
} from "../models/models_0";
import {
  deserializeAws_json1_1DescribePredictorBacktestExportJobCommand,
  serializeAws_json1_1DescribePredictorBacktestExportJobCommand,
} from "../protocols/Aws_json1_1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
  FinalizeHandlerArguments,
  Handler,
  HandlerExecutionContext,
  MiddlewareStack,
  HttpHandlerOptions as __HttpHandlerOptions,
  MetadataBearer as __MetadataBearer,
  SerdeContext as __SerdeContext,
} from "@aws-sdk/types";

export interface DescribePredictorBacktestExportJobCommandInput extends DescribePredictorBacktestExportJobRequest {}
export interface DescribePredictorBacktestExportJobCommandOutput
  extends DescribePredictorBacktestExportJobResponse,
    __MetadataBearer {}

/**
 * <p>Describes a predictor backtest export job created using the
 *       <a>CreatePredictorBacktestExportJob</a> operation.</p>
 *          <p>In addition to listing the properties provided by the user in the
 *       <code>CreatePredictorBacktestExportJob</code> request, this operation lists the
 *       following properties:</p>
 *          <ul>
 *             <li>
 *                <p>
 *                   <code>CreationTime</code>
 *                </p>
 *             </li>
 *             <li>
 *                <p>
 *                   <code>LastModificationTime</code>
 *                </p>
 *             </li>
 *             <li>
 *                <p>
 *                   <code>Status</code>
 *                </p>
 *             </li>
 *             <li>
 *                <p>
 *                   <code>Message</code> (if an error occurred)</p>
 *             </li>
 *          </ul>
 */
export class DescribePredictorBacktestExportJobCommand extends $Command<
  DescribePredictorBacktestExportJobCommandInput,
  DescribePredictorBacktestExportJobCommandOutput,
  ForecastClientResolvedConfig
> {
  // Start section: command_properties
  // End section: command_properties

  constructor(readonly input: DescribePredictorBacktestExportJobCommandInput) {
    // Start section: command_constructor
    super();
    // End section: command_constructor
  }

  /**
   * @internal
   */
  resolveMiddleware(
    clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
    configuration: ForecastClientResolvedConfig,
    options?: __HttpHandlerOptions
  ): Handler<DescribePredictorBacktestExportJobCommandInput, DescribePredictorBacktestExportJobCommandOutput> {
    this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));

    const stack = clientStack.concat(this.middlewareStack);

    const { logger } = configuration;
    const clientName = "ForecastClient";
    const commandName = "DescribePredictorBacktestExportJobCommand";
    const handlerExecutionContext: HandlerExecutionContext = {
      logger,
      clientName,
      commandName,
      inputFilterSensitiveLog: DescribePredictorBacktestExportJobRequest.filterSensitiveLog,
      outputFilterSensitiveLog: DescribePredictorBacktestExportJobResponse.filterSensitiveLog,
    };
    const { requestHandler } = configuration;
    return stack.resolve(
      (request: FinalizeHandlerArguments<any>) =>
        requestHandler.handle(request.request as __HttpRequest, options || {}),
      handlerExecutionContext
    );
  }

  private serialize(
    input: DescribePredictorBacktestExportJobCommandInput,
    context: __SerdeContext
  ): Promise<__HttpRequest> {
    return serializeAws_json1_1DescribePredictorBacktestExportJobCommand(input, context);
  }

  private deserialize(
    output: __HttpResponse,
    context: __SerdeContext
  ): Promise<DescribePredictorBacktestExportJobCommandOutput> {
    return deserializeAws_json1_1DescribePredictorBacktestExportJobCommand(output, context);
  }

  // Start section: command_body_extra
  // End section: command_body_extra
}
stateful_paragraph.rs
#![allow(dead_code)]

use easy_cast::Cast;
use std::iter;
use tui::{
    buffer::Buffer,
    layout::{Alignment, Rect},
    style::Style,
    text::{StyledGrapheme, Text},
    widgets::{Block, StatefulWidget, Widget, Wrap},
};
use unicode_width::UnicodeWidthStr;

use super::reflow::{LineComposer, LineTruncator, WordWrapper};

const fn get_line_offset(
    line_width: u16,
    text_area_width: u16,
    alignment: Alignment,
) -> u16 {
    match alignment {
        Alignment::Center => {
            (text_area_width / 2).saturating_sub(line_width / 2)
        }
        Alignment::Right => {
            text_area_width.saturating_sub(line_width)
        }
        Alignment::Left => 0,
    }
}

#[derive(Debug, Clone)]
pub struct StatefulParagraph<'a> {
    /// A block to wrap the widget in
    block: Option<Block<'a>>,
    /// Widget style
    style: Style,
    /// How to wrap the text
    wrap: Option<Wrap>,
    /// The text to display
    text: Text<'a>,
    /// Alignment of the text
    alignment: Alignment,
}

#[derive(Debug, Default, Clone, Copy)]
pub struct ScrollPos {
    pub x: u16,
    pub y: u16,
}

impl ScrollPos {
    pub const fn new(x: u16, y: u16) -> Self {
        Self { x, y }
    }
}

#[derive(Debug, Copy, Clone, Default)]
pub struct ParagraphState {
    /// Scroll
    scroll: ScrollPos,
    /// after all wrapping this is the amount of lines
    lines: u16,
    /// last visible height
    height: u16,
}

impl ParagraphState {
    pub const fn lines(self) -> u16 {
        self.lines
    }

    pub const fn height(self) -> u16 {
        self.height
    }

    pub const fn scroll(self) -> ScrollPos {
        self.scroll
    }

    pub fn set_scroll(&mut self, scroll: ScrollPos) {
        self.scroll = scroll;
    }
}

impl<'a> StatefulParagraph<'a> {
    pub fn new<T>(text: T) -> Self
    where
        T: Into<Text<'a>>,
    {
        Self {
            block: None,
            style: Style::default(),
            wrap: None,
            text: text.into(),
            alignment: Alignment::Left,
        }
    }

    #[allow(clippy::missing_const_for_fn)]
    pub fn block(mut self, block: Block<'a>) -> Self {
        self.block = Some(block);
        self
    }

    pub const fn style(mut self, style: Style) -> Self {
        self.style = style;
        self
    }

    pub const fn wrap(mut self, wrap: Wrap) -> Self {
        self.wrap = Some(wrap);
        self
    }

    pub const fn alignment(mut self, alignment: Alignment) -> Self {
        self.alignment = alignment;
        self
    }
}

impl<'a> StatefulWidget for StatefulParagraph<'a> {
    type State = ParagraphState;

    fn render(
        mut self,
        area: Rect,
        buf: &mut Buffer,
        state: &mut Self::State,
    ) {
        buf.set_style(area, self.style);
        let text_area = match self.block.take() {
            Some(b) => {
                let inner_area = b.inner(area);
                b.render(area, buf);
                inner_area
            }
            None => area,
        };

        if text_area.height < 1 {
            return;
        }

        let style = self.style;
        let mut styled = self.text.lines.iter().flat_map(|spans| {
            spans
                .0
                .iter()
                .flat_map(|span| span.styled_graphemes(style))
                // Required given the way composers work but might be refactored out if we change
                // composers to operate on lines instead of a stream of graphemes.
                .chain(iter::once(StyledGrapheme {
                    symbol: "\n",
                    style: self.style,
                }))
        });

        let mut line_composer: Box<dyn LineComposer> =
            if let Some(Wrap { trim }) = self.wrap {
                Box::new(WordWrapper::new(
                    &mut styled,
                    text_area.width,
                    trim,
                ))
            } else {
                let mut line_composer = Box::new(LineTruncator::new(
                    &mut styled,
                    text_area.width,
                ));
                if let Alignment::Left = self.alignment {
                    line_composer
                        .set_horizontal_offset(state.scroll.x);
                }
                line_composer
            };

        let mut y = 0;
        let mut end_reached = false;
        while let Some((current_line, current_line_width)) =
            line_composer.next_line()
        {
            if !end_reached && y >= state.scroll.y {
                let mut x = get_line_offset(
                    current_line_width,
                    text_area.width,
                    self.alignment,
                );
                for StyledGrapheme { symbol, style } in current_line {
                    buf.get_mut(
                        text_area.left() + x,
                        text_area.top() + y - state.scroll.y,
                    )
                    .set_symbol(if symbol.is_empty() {
                        // If the symbol is empty, the last char which rendered last time will
                        // leave on the line. It's a quick fix.
                        " "
                    } else {
                        symbol
                    })
                    .set_style(*style);
                    x += Cast::<u16>::cast(symbol.width());
                }
            }
            y += 1;
            if y >= text_area.height + state.scroll.y {
                end_reached = true;
            }
        }

        state.lines = y;
        state.height = area.height;
    }
}
main.rs
use anyhow::Result;
use aoc_2019_day_02::*;

fn main() -> Result<()> {
    println!("Advent of Code {}-{:02}", YEAR, DAY);

    let input = aoc::input_from_stdin()?;

    let answer = part_one(&input)?;
    println!("--> part one:");
    println!("{}", answer);

    let answer = part_two(&input)?;
    println!("--> part two:");
    println!("{}", answer);

    Ok(())
}
participant_status.go
// © Copyright IBM Corporation 2020. All rights reserved.
// SPDX-License-Identifier: Apache2.0
//
// Code generated by go-swagger; DO NOT EDIT.

package model

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
    "encoding/json"

    strfmt "github.com/go-openapi/strfmt"

    "github.com/go-openapi/errors"
    "github.com/go-openapi/swag"
    "github.com/go-openapi/validate"
)

// ParticipantStatus participantStatus
//
// ParticipantStatus
// swagger:model ParticipantStatus
type ParticipantStatus struct {

    // Participant active status on WW network, inactive, active, suspended
    // Required: true
    // Enum: [inactive active suspended]
    Status *string `json:"status"`
}

// Validate validates this participant status
func (m *ParticipantStatus) Validate(formats strfmt.Registry) error {
    var res []error

    if err := m.validateStatus(formats); err != nil {
        res = append(res, err)
    }

    if len(res) > 0 {
        return errors.CompositeValidationError(res...)
    }
    return nil
}

var participantStatusTypeStatusPropEnum []interface{}

func init() {
    var res []string
    if err := json.Unmarshal([]byte(`["inactive","active","suspended"]`), &res); err != nil {
        panic(err)
    }
    for _, v := range res {
        participantStatusTypeStatusPropEnum = append(participantStatusTypeStatusPropEnum, v)
    }
}

const (

    // ParticipantStatusStatusInactive captures enum value "inactive"
    ParticipantStatusStatusInactive string = "inactive"

    // ParticipantStatusStatusActive captures enum value "active"
    ParticipantStatusStatusActive string = "active"

    // ParticipantStatusStatusSuspended captures enum value "suspended"
    ParticipantStatusStatusSuspended string = "suspended"
)

// prop value enum
func (m *ParticipantStatus) validateStatusEnum(path, location string, value string) error {
    if err := validate.Enum(path, location, value, participantStatusTypeStatusPropEnum); err != nil {
        return err
    }
    return nil
}

func (m *ParticipantStatus) validateStatus(formats strfmt.Registry) error {

    if err := validate.Required("status", "body", m.Status); err != nil {
        return err
    }

    // value enum
    if err := m.validateStatusEnum("status", "body", *m.Status); err != nil {
        return err
    }

    return nil
}

// MarshalBinary interface implementation
func (m *ParticipantStatus) MarshalBinary() ([]byte, error) {
    if m == nil {
        return nil, nil
    }
    return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *ParticipantStatus) UnmarshalBinary(b []byte) error {
    var res ParticipantStatus
    if err := swag.ReadJSON(b, &res); err != nil {
        return err
    }
    *m = res
    return nil
}
boolean.rs
#[pen_ffi_macro::any(crate = "crate")]
#[repr(C)]
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub struct Boolean {
    value: bool,
}

impl Boolean {
    pub fn new(value: bool) -> Self {
        Self { value }
    }
}

impl From<Boolean> for bool {
    fn from(number: Boolean) -> Self {
        number.value
    }
}

impl From<bool> for Boolean {
    fn from(value: bool) -> Self {
        Self::new(value)
    }
}
nwocrsounds.py
""" New World OCR Node Detector Created: 2021-10-07 Dev: Wes H. Uses OCR to get coordinates from top right of the NW game window and imposes that against a list of possible nodes.
import pytesseract import psutil from time import sleep import pathlib import iron_markers as imark import essence_markers as emark import chest_essence as ce import numpy as np # for 3440*1440 : (3182,19,3416,39) localpath = str(pathlib.Path(__file__).parent.resolve()) pytesseract.pytesseract.tesseract_cmd = rf"{localpath}\Tesseract-OCR\tesseract.exe" # node = [['7831', '1673'], ['9341', '2725']] node = ce.chest_essence def screen_loc_check(items, screen_img): z = ImageOps.crop(screen_img, (173,0,50,0)) zpos = pytesseract.image_to_string(z, config="--psm 13 outputbase digits") zpos = str(zpos).replace('\n', '').replace('\x0c', '').replace('(', '').replace(']', '').replace('[', '') if zpos.isdigit() and int(float(zpos)) >= 100: xcrop = (0,0,220,0) ycrop = (82,0,128,0) else: xcrop = (0,0,210,0) ycrop = (88,0,120,0) x = ImageOps.crop(screen_img, xcrop) y = ImageOps.crop(screen_img, ycrop) x = x.resize((150, 100)) y = y.resize((150, 100)) datax = np.array(x) datay = np.array(y) r1, g1, b1 = 235, 235, 165 r1x, g1x, b1x = 110, 105, 70 r2, g2, b2 = 0, 0, 0 redx, greenx, bluex = datax[:,:,0], datax[:,:,1], datax[:,:,2] redy, greeny, bluey = datay[:,:,0], datay[:,:,1], datay[:,:,2] mask1x = (redx <= r1x) & (greenx <= g1x) & (bluex <= b1x) mask2x = (redx >= r1) & (greenx >= g1) & (bluex >= b1) mask1y = (redy <= r1x) & (greeny <= g1x) & (bluey <= b1x) mask2y = (redy >= r1) & (greeny >= g1) & (bluey >= b1) datax[:,:,:3][mask1x] = [r2, g2, b2] datax[:,:,:3][mask2x] = [r2, g2, b2] datay[:,:,:3][mask1y] = [r2, g2, b2] datay[:,:,:3][mask2y] = [r2, g2, b2] x = Image.fromarray(datax) y = Image.fromarray(datay) x.convert("L") y.convert("L") xpos = pytesseract.image_to_string(x, config="--psm 13 outputbase digits") ypos = pytesseract.image_to_string(y, config="--psm 13 outputbase digits") xpos = str(xpos).replace('\n', '').replace('\x0c', '').replace('(', '').replace(']', '').replace('[', '') ypos = str(ypos).replace('\n', '').replace('\x0c', '').replace('(', '').replace(']', '').replace('[', '') pos = [xpos, ypos] confirms = [] for element in items: min_x = int(float(element[0]))-15 max_x = int(float(element[0]))+15 min_y = int(float(element[1]))-15 max_y = int(float(element[1]))+15 if pos[0].isdigit() and pos[1].isdigit(): if int(float(pos[0])) >= min_x and int(float(pos[0])) <= max_x and int(float(pos[1])) >= min_y and int(float(pos[1])) <= max_y: confirms.append(True) else: confirms.append(False) else: pass if any(confirms): print("All Match\n ---------") print(pos[0], pos[1]) return True else: print("Miss\n ---------") print(pos[0], pos[1]) return False while "NewWorld.exe" in (p.name() for p in psutil.process_iter()): screen = ImageGrab.grab(bbox=(3191, 19, 3440, 39)) remote_image = screen.convert('RGBA') remote_image.save('grabbed.png') if screen_loc_check(node, remote_image) is True: duration = 333 freq = 880 winsound.Beep(freq, duration) sleep(1)
When you're close to one it will play a bell noise! """ import winsound from PIL import ImageGrab, ImageOps, Image
append_slice.go
// main package has examples shown
// in Hands-On Data Structures and algorithms with Go book
package main

// importing fmt package
import (
    "fmt"
)

// main method
func main() {
    var arr = []int{5, 6, 7, 8, 9}
    var slic1 = arr[:3]
    fmt.Println("slice1", slic1)
    var slic2 = arr[1:5]
    fmt.Println("slice2", slic2)
    var slic3 = append(slic2, 12)
    fmt.Println("slice3", slic3)
}
observable-slim.js
const prettyBytes = require('pretty-bytes');
const observableSlim = require('observable-slim');
const data = require('../data/movies.json');
const mutate = require('./mutate-data');

let changeCount = 0;
let maxMem = 0;

function checkMem() {
  const mem = process.memoryUsage();
  if (mem.rss > maxMem) maxMem = mem.rss;
}

const start = new Date();

const watchedObject = observableSlim.create(data, false, () => {
  changeCount += 1;
  checkMem();
});

mutate(watchedObject);

console.log({
  time: new Date() - start,
  maxMem: prettyBytes(maxMem),
  changeCount,
});

setTimeout(() => process.exit(0), 100);
geometry.py
""" ``scenesim.display.geometry`` ============================= Functions for manipulating graphics geometry. """ import numpy as np def zbuffer_to_z(zb, near, far): """Inputs Z-buffer image and returns each pixel's distance from the camera along the Z-axis. Args: zb (numpy.ndarray, 2D): Z-buffer image. near (float): Distance of near camera plane. far (float): Distance of far camera plane. Return: (numpy.ndarray, 2D): Z-distance of each pixel. """ z = far * near / (far - zb * (far - near)) return z def img_to_d(xi, yi, zb, near, far): """Inputs image X, Y coordinates and Z-buffer image, and returns Euclidean distance from the camera's position to each point. Args: xi, yi (numpy.ndarray, 2D): X-, Y-coordinates of each pixel. zb (numpy.ndarray, 2D): Z-buffer image. near (float): Distance of near camera plane. far (float): Distance of far camera plane. Return: (numpy.ndarray, 2D): Euclidean distance of each pixel. """ z = zbuffer_to_z(zb, near, far) phi = np.arctan2(np.sqrt(xi ** 2 + yi ** 2), near) d = z / np.cos(phi) return d def img_to_xyz(xi, yi, zb, near, far): """Inputs image X, Y coordinates and Z-buffer image, and returns the pixels' X, Y, Z coordinates in 3D. Args: xi, yi (numpy.ndarray, 2D): X-, Y-coordinates of each pixel. zb (numpy.ndarray, 2D): Z-buffer image. near (float): Distance of near camera plane. far (float): Distance of far camera plane. Return: (numpy.ndarray, 3x2D): X-, Y-, Z-coordinates of each pixel. """ z = zbuffer_to_z(zb, near, far) x = xi * z / near y = yi * z / near xyz = np.array((x, y, z)) return xyz def get_projection_mat(camera): """Projection matrix of camera. Args: camera (panda3d.core.NodePath): Camera NodePath. Return: (numpy.matrix, 4x4): Projection matrix (homogeneous). """ lens = camera.node().getLens() frust_mat = np.matrix(lens.getProjectionMat()) cam_mat = np.matrix(camera.getNetTransform().getMat()) proj_mat = cam_mat.I * frust_mat return proj_mat def extrude(point2d, proj_mat): """Compute the 3D inverse perspective projection of a 2D point. Args: point2d (numpy.ndarray, Nx2): Array of 2D points. proj_mat (numpy.matrix, 4x4): Projection matrix (homogeneous). Return: (numpy.ndarray, Nx3): Array of inverse projected 3D points. """ # Inverse projection matrix proj_mat_inv = np.linalg.inv(proj_mat) # calculate the near and far points hp2 = np.array(((point2d[0], point2d[1], -1., 1.), (point2d[0], point2d[1], 1., 1.))) hp3 = np.dot(hp2, proj_mat_inv) scale = hp3[:, [3]].copy() thresh = 0.00001 scale[(scale > 0) & (scale < thresh)] = thresh scale[(scale < 0) & (scale > -thresh)] = -thresh point3d = np.array(hp3[:, :3] / scale) return point3d def project(point3d, proj_mat): """ Compute the 2D perspective projection of 3D point(s). Args: point3d (numpy.ndarray, Nx{3,4}): Array of 3D (or 4D, if homogeneous) points. proj_mat (numpy.matrix, 4x4): Projection matrix (homogeneous). Return: (numpy.ndarray, Nx2): Array of inverse projected 2D points. (numpy.ndarray, N): Indicates points that are behind the camera. 
""" # Cast to np.array point3d = np.array(point3d) if point3d.ndim == 1: # Add dimension in front, if necessary point3d = point3d[None, :] # Make homogeneous coordinates from point3d d1len = point3d.shape[1] if d1len == 3: hp3 = np.hstack((point3d, np.ones(point3d.shape[0])[:, None])) elif d1len == 4: hp3 = point3d else: raise ValueError("point3d must be either Nx{3,4}, but it is %i" % d1len) # Compute the linear portion of the projection hp2 = np.dot(hp3, proj_mat) # Compute z-scaling point2d = np.array(hp2[:, :2] / hp2[:, [3]]) f_behind = hp2[:, 2] < 0 return point2d, f_behind def plane_intersection(line3, point3, normal3): """ Compute point of intersection between a line and a plane. Args:
Return: (numpy.ndarray, 3): 3D intersection point. """ # Line ray ray = np.diff(line3, axis=0).ravel() # https://en.wikipedia.org/wiki/Line-plane_intersection d = np.dot(point3 - line3[0], normal3) / np.dot(ray, normal3) # Intersection point p3 = d * ray + line3[0] return p3
line3 (numpy.ndarray, 2x3): 3D line defined by endpoints. point3 (numpy.ndarray, 3): 3D point on the plane. normal3 (numpy.ndarray, 3): 3D normal to the plane.
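# --- Illustrative usage (editor's addition, not part of the original module) ---
# A minimal sketch of the extrude/project round trip, assuming a synthetic
# identity projection matrix for simplicity; in real use proj_mat would come
# from get_projection_mat() on a panda3d camera.
import numpy as np

if __name__ == "__main__":
    proj_mat = np.matrix(np.eye(4))  # stand-in projection, illustration only
    point2d = np.array([0.25, -0.5])
    # Extrude the 2D point to its 3D near/far points...
    p3 = extrude(point2d, proj_mat)
    # ...then project the near point back; f_behind flags points behind the camera.
    p2, f_behind = project(p3[0], proj_mat)
    print(p2, f_behind)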
updated.ts
import * as path from "path"; import * as fse from "fs-extra"; import { ContractObject } from "@truffle/contract-schema/spec"; export interface UpdatedOptions { paths: string[]; contractsBuildDirectory: string; } export async function updated({ paths, contractsBuildDirectory, }: UpdatedOptions): Promise<string[]> { const sourceFilesArtifacts = readAndParseArtifactFiles( paths, contractsBuildDirectory ); const sourceFilesArtifactsUpdatedTimes = minimumUpdatedTimePerSource( sourceFilesArtifacts ); return findUpdatedFiles( sourceFilesArtifacts, sourceFilesArtifactsUpdatedTimes ); } interface SourceFilesArtifacts { [filePath: string]: ContractObject[]; } interface SourceFilesArtifactsUpdatedTimes { [filePath: string]: number; // ms since epoch } function readAndParseArtifactFiles( paths: string[], contracts_build_directory: string ): SourceFilesArtifacts { const sourceFilesArtifacts: SourceFilesArtifacts = {}; // Get all the source files and create an object out of them. paths.forEach((sourceFile) => { sourceFilesArtifacts[sourceFile] = []; }); // Get all the artifact files, and read them, parsing them as JSON let buildFiles: string[]; try { buildFiles = fse.readdirSync(contracts_build_directory); } catch (error) { // The build directory may not always exist. if (error.message.includes("ENOENT: no such file or directory")) { // Ignore it. buildFiles = []; } else { throw error; } } buildFiles = buildFiles.filter((file) => path.extname(file) === ".json"); const jsonData = buildFiles.map((file) => { const body = fse.readFileSync( path.join(contracts_build_directory, file), "utf8" ); return { file, body }; }); for (let i = 0; i < jsonData.length; i++) { try { const data: ContractObject = JSON.parse(jsonData[i].body); // In case there are artifacts from other source locations. if (sourceFilesArtifacts[data.sourcePath] == null) { sourceFilesArtifacts[data.sourcePath] = []; } sourceFilesArtifacts[data.sourcePath].push(data); } catch (error) {
throw new Error("Problem parsing artifact: " + jsonData[i].file); } else { throw error; } } } return sourceFilesArtifacts; } function findUpdatedFiles( sourceFilesArtifacts: SourceFilesArtifacts, sourceFilesArtifactsUpdatedTimes: SourceFilesArtifactsUpdatedTimes ): string[] { // Stat all the source files, getting there updated times, and comparing them to // the artifact updated times. const sourceFiles = Object.keys(sourceFilesArtifacts); let sourceFileStats: (fse.Stats | null)[]; sourceFileStats = sourceFiles.map((file) => { try { return fse.statSync(file); } catch (error) { // Ignore it. This means the source file was removed // but the artifact file possibly exists. Return null // to signfy that we should ignore it. return null; } }); return sourceFiles .map((sourceFile, index) => { const sourceFileStat = sourceFileStats[index]; // Ignore updating artifacts if source file has been removed. if (sourceFileStat == null) return; const artifactsUpdatedTime = sourceFilesArtifactsUpdatedTimes[sourceFile] || 0; const sourceFileUpdatedTime = ( sourceFileStat.mtime || sourceFileStat.ctime ).getTime(); if (sourceFileUpdatedTime > artifactsUpdatedTime) return sourceFile; }) .filter((file) => file); } function minimumUpdatedTimePerSource( sourceFilesArtifacts: SourceFilesArtifacts ) { let sourceFilesArtifactsUpdatedTimes: SourceFilesArtifactsUpdatedTimes = {}; // Get the minimum updated time for all of a source file's artifacts // (note: one source file might have multiple artifacts). for (const sourceFile of Object.keys(sourceFilesArtifacts)) { const artifacts = sourceFilesArtifacts[sourceFile]; sourceFilesArtifactsUpdatedTimes[sourceFile] = artifacts.reduce( (minimum, current) => { const updatedAt = new Date(current.updatedAt).getTime(); if (updatedAt < minimum) { return updatedAt; } return minimum; }, Number.MAX_SAFE_INTEGER ); // Empty array? if ( sourceFilesArtifactsUpdatedTimes[sourceFile] === Number.MAX_SAFE_INTEGER ) { sourceFilesArtifactsUpdatedTimes[sourceFile] = 0; } } return sourceFilesArtifactsUpdatedTimes; }
// JSON.parse throws SyntaxError objects if (error instanceof SyntaxError) {
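// --- Illustrative usage (editor's addition, not part of the original module) ---
// A minimal sketch of calling updated(); the paths below are hypothetical and
// would normally come from Truffle's resolved project configuration.
import { updated } from "./updated";

async function demo() {
  const staleSources = await updated({
    paths: ["/my-project/contracts/MyContract.sol"], // hypothetical source file
    contractsBuildDirectory: "/my-project/build/contracts", // hypothetical build dir
  });
  // Sources newer than all of their artifacts need recompiling.
  console.log(staleSources);
}

demo();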
bot.py
import discord
from discord.ext import commands
from helpers.logHelper import logger
import os
import logging
from pymongo import MongoClient
from helpers.getPrefix import getPrefix
import ast
from helpers.getWeather import getWeather
import time
from pretty_help import PrettyHelp

logging.basicConfig(level=logging.INFO)

DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)
MONGODB = os.environ.get("MONGODB", None)

intents = discord.Intents.default()
intents.members = True

bot = commands.Bot(command_prefix="nb.", help_command=PrettyHelp(), intents=intents)
# bot = commands.Bot(command_prefix='*', help_command=None)

client = MongoClient(MONGODB)
db = client["discord"]
collection = db["bot"]

all_categories = [category for category in os.listdir("./cogs")]
print(all_categories)
for category in all_categories:
    for filename in os.listdir(f"./cogs/{category}"):
        try:
            if filename.endswith(".py"):
                bot.load_extension(f"cogs.{category}.{filename[:-3]}")
                logger.info(f"Successfully Loaded Cog: {filename}")
            else:
                print(f"Unable to load {filename}")
                logger.warning(
                    f"Unable to load {filename}, is it supposed to be in the cog directory?"
                )
        except Exception as e:
            logger.warning(f"Unable to load cog: {e}")

"""
check for frequency data in mongo and create a doc for it if it doesn't exist
"""
if not collection.find_one({"_id": "word_command_freq"}):
    collection.insert_one({"_id": "word_command_freq"})
if not collection.find_one({"_id": "paper_trading_accounts"}):
    collection.insert_one({"_id": "paper_trading_accounts"})


@bot.event
async def on_message(message):
    user = message.author
    contents = message.content.split(" ")
    word = contents[0]
    member = str(message.author)
    if user.bot:
        pass
    elif not word:
        print("no words to add")
    else:
        try:
            # MongoDB field names cannot contain "." or start with "$",
            # so sanitize the word before using it as a key.
            if "." in word:
                word = word.replace(".", "(Dot)")
            if "$" in word:
                word = word.replace("$", "(Dollar_Sign)")
        except Exception as e:
            print(str(e) + " caught in on_message")
            logger.warning(e)
        print(member + ": " + word)
        # is_in_word_command_freq=collection.find_one({"_id":"word_command_freq",word:{"$size": 0}})
        # print(is_in_word_command_freq)
        if collection.find_one({"_id": "word_command_freq", word: {"$exists": True}}):
            collection.update_one({"_id": "word_command_freq"}, {"$inc": {word: 1}})
            print("incremented freq value " + word + " by 1 in word_command_freq doc")
        else:
# print(collection.find_one({"_id": "word_command_freq"})) await bot.process_commands(message) @bot.event async def on_guild_join(guild): guild_id = guild.id collection.insert_one({"_id": guild_id, "prefix": ","}) print("done") async def latency(ctx): time_1 = time.perf_counter() await ctx.trigger_typing() time_2 = time.perf_counter() ping = round((time_2 - time_1) * 1000) await ctx.send(f"ping = {ping}") try: bot.run(DISCORD_TOKEN) logger.info("Bot Is Off\n----------------------------------- END OF SESSION") except Exception as e: logger.warning(f"Bot Failed to initialise: {e}")
print(collection.update({"_id": "word_command_freq"}, {"$set": {word: 1}})) print("added " + word + " to word_command_freq")
Post060339901.py
# coding=utf-8 from OTLMOW.PostenMapping.StandaardPost import StandaardPost from OTLMOW.PostenMapping.StandaardPostMapping import StandaardPostMapping # Generated with PostenCreator. To modify: extend, do not edit class
(StandaardPost): def __init__(self): super().__init__( nummer='0603.39901', beschrijving='Heropvoegen van betonstraatstenen volgens 6-3.4', meetstaateenheid='M2', mappings=[StandaardPostMapping( typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetonstraatsteen', attribuutURI='', dotnotatie='', defaultWaarde='', range='', usagenote='', isMeetstaatAttr=0, isAltijdInTeVullen=1, isBasisMapping=1, mappingStatus='wordt gemapt 2.0', mappingOpmerking='Activiteit [Heropvoegen] komt niet voor in de OTL', standaardpostnummer='0603.39901')])
Post060339901
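# --- Illustrative usage (editor's addition, not part of the generated file) ---
# A minimal sketch of instantiating the generated post and reading its mapping.
# It assumes StandaardPost stores its constructor arguments under attributes of
# the same names (nummer, meetstaateenheid, mappings), which is an assumption
# about the OTLMOW base class, not something confirmed by this file.
post = Post060339901()
print(post.nummer, post.meetstaateenheid)
print(post.mappings[0].standaardpostnummer)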
feature_gate.rs
//! # Feature gating //! //! This module implements the gating necessary for preventing certain compiler //! features from being used by default. This module will crawl a pre-expanded //! AST to ensure that there are no features which are used that are not //! enabled. //! //! Features are enabled in programs via the crate-level attributes of //! `#![feature(...)]` with a comma-separated list of features. //! //! For the purpose of future feature-tracking, once code for detection of feature //! gate usage is added, *do not remove it again* even once the feature //! becomes stable. use AttributeType::*; use AttributeGate::*; use crate::ast::{self, NodeId, GenericParam, GenericParamKind, PatKind, RangeEnd}; use crate::attr; use crate::early_buffered_lints::BufferedEarlyLintId; use crate::source_map::Spanned; use crate::edition::{ALL_EDITIONS, Edition}; use crate::visit::{self, FnKind, Visitor}; use crate::parse::{token, ParseSess}; use crate::symbol::Symbol; use crate::tokenstream::TokenTree; use errors::{DiagnosticBuilder, Handler}; use rustc_data_structures::fx::FxHashMap; use rustc_target::spec::abi::Abi; use syntax_pos::{Span, DUMMY_SP}; use log::debug; use std::env; macro_rules! set { ($field: ident) => {{ fn f(features: &mut Features, _: Span) { features.$field = true; } f as fn(&mut Features, Span) }} } macro_rules! declare_features { ($((active, $feature: ident, $ver: expr, $issue: expr, $edition: expr),)+) => { /// Represents active features that are currently being implemented or /// currently being considered for addition/removal. const ACTIVE_FEATURES: &[(&str, &str, Option<u32>, Option<Edition>, fn(&mut Features, Span))] = &[$((stringify!($feature), $ver, $issue, $edition, set!($feature))),+]; /// A set of features to be used by later passes. #[derive(Clone)] pub struct Features { /// `#![feature]` attrs for language features, for error reporting pub declared_lang_features: Vec<(Symbol, Span, Option<Symbol>)>, /// `#![feature]` attrs for non-language (library) features pub declared_lib_features: Vec<(Symbol, Span)>, $(pub $feature: bool),+ } impl Features { pub fn new() -> Features { Features { declared_lang_features: Vec::new(), declared_lib_features: Vec::new(), $($feature: false),+ } } pub fn walk_feature_fields<F>(&self, mut f: F) where F: FnMut(&str, bool) { $(f(stringify!($feature), self.$feature);)+ } } }; ($((removed, $feature: ident, $ver: expr, $issue: expr, None, $reason: expr),)+) => { /// Represents unstable features which have since been removed (it was once Active) const REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, $reason)),+ ]; }; ($((stable_removed, $feature: ident, $ver: expr, $issue: expr, None),)+) => { /// Represents stable features which have since been removed (it was once Accepted) const STABLE_REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, None)),+ ]; }; ($((accepted, $feature: ident, $ver: expr, $issue: expr, None),)+) => { /// Those language feature has since been Accepted (it was once Active) const ACCEPTED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, None)),+ ]; } } // If you change this, please modify `src/doc/unstable-book` as well. // // Don't ever remove anything from this list; set them to 'Removed'. // // The version numbers here correspond to the version in which the current status // was set. 
This is most important for knowing when a particular feature became // stable (active). // // N.B., `tools/tidy/src/features.rs` parses this information directly out of the // source, so take care when modifying it. declare_features! ( (active, asm, "1.0.0", Some(29722), None), (active, concat_idents, "1.0.0", Some(29599), None), (active, link_args, "1.0.0", Some(29596), None), (active, log_syntax, "1.0.0", Some(29598), None), (active, non_ascii_idents, "1.0.0", Some(55467), None), (active, plugin_registrar, "1.0.0", Some(29597), None), (active, thread_local, "1.0.0", Some(29594), None), (active, trace_macros, "1.0.0", Some(29598), None), // rustc internal, for now (active, intrinsics, "1.0.0", None, None), (active, lang_items, "1.0.0", None, None), (active, format_args_nl, "1.29.0", None, None), (active, link_llvm_intrinsics, "1.0.0", Some(29602), None), (active, linkage, "1.0.0", Some(29603), None), // rustc internal (active, rustc_diagnostic_macros, "1.0.0", None, None), (active, rustc_const_unstable, "1.0.0", None, None), (active, box_syntax, "1.0.0", Some(49733), None), (active, unboxed_closures, "1.0.0", Some(29625), None), (active, fundamental, "1.0.0", Some(29635), None), (active, main, "1.0.0", Some(29634), None), (active, needs_allocator, "1.4.0", Some(27389), None), (active, on_unimplemented, "1.0.0", Some(29628), None), (active, plugin, "1.0.0", Some(29597), None), (active, simd_ffi, "1.0.0", Some(27731), None), (active, start, "1.0.0", Some(29633), None), (active, structural_match, "1.8.0", Some(31434), None), (active, panic_runtime, "1.10.0", Some(32837), None), (active, needs_panic_runtime, "1.10.0", Some(32837), None), // Features specific to OIBIT (auto traits) (active, optin_builtin_traits, "1.0.0", Some(13231), None), // Allows `#[staged_api]`. // // rustc internal (active, staged_api, "1.0.0", None, None), // Allows `#![no_core]`. (active, no_core, "1.3.0", Some(29639), None), // Allows the use of `box` in patterns (RFC 469). (active, box_patterns, "1.0.0", Some(29641), None), // Allows the use of the `unsafe_destructor_blind_to_params` attribute (RFC 1238). (active, dropck_parametricity, "1.3.0", Some(28498), None), // Allows using the `may_dangle` attribute (RFC 1327). (active, dropck_eyepatch, "1.10.0", Some(34761), None), // Allows the use of custom attributes (RFC 572). (active, custom_attribute, "1.0.0", Some(29642), None), // Allows the use of `rustc_*` attributes (RFC 572). (active, rustc_attrs, "1.0.0", Some(29642), None), // Allows the use of non lexical lifetimes (RFC 2094). (active, nll, "1.0.0", Some(43234), None), // Allows the use of `#[allow_internal_unstable]`. This is an // attribute on `macro_rules!` and can't use the attribute handling // below (it has to be checked before expansion possibly makes // macros disappear). // // rustc internal (active, allow_internal_unstable, "1.0.0", None, None), // Allows the use of `#[allow_internal_unsafe]`. This is an // attribute on `macro_rules!` and can't use the attribute handling // below (it has to be checked before expansion possibly makes // macros disappear). // // rustc internal (active, allow_internal_unsafe, "1.0.0", None, None), // Allows the use of slice patterns (issue #23121). (active, slice_patterns, "1.0.0", Some(23121), None), // Allows the definition of `const` functions with some advanced features. (active, const_fn, "1.2.0", Some(57563), None), // Allows accessing fields of unions inside `const` functions. (active, const_fn_union, "1.27.0", Some(51909), None),
(active, const_raw_ptr_deref, "1.27.0", Some(51911), None), // Allows reinterpretation of the bits of a value of one type as another type during const eval. (active, const_transmute, "1.29.0", Some(53605), None), // Allows comparing raw pointers during const eval. (active, const_compare_raw_pointers, "1.27.0", Some(53020), None), // Allows panicking during const eval (producing compile-time errors). (active, const_panic, "1.30.0", Some(51999), None), // Allows using `#[prelude_import]` on glob `use` items. // // rustc internal (active, prelude_import, "1.2.0", None, None), // Allows default type parameters to influence type inference. (active, default_type_parameter_fallback, "1.3.0", Some(27336), None), // Allows associated type defaults. (active, associated_type_defaults, "1.2.0", Some(29661), None), // Allows `repr(simd)` and importing the various simd intrinsics. (active, repr_simd, "1.4.0", Some(27731), None), // Allows `extern "platform-intrinsic" { ... }`. (active, platform_intrinsics, "1.4.0", Some(27731), None), // Allows `#[unwind(..)]`. // // Permits specifying whether a function should permit unwinding or abort on unwind. (active, unwind_attributes, "1.4.0", Some(58760), None), // Allows the use of `#[naked]` on functions. (active, naked_functions, "1.9.0", Some(32408), None), // Allows `#[no_debug]`. (active, no_debug, "1.5.0", Some(29721), None), // Allows `#[omit_gdb_pretty_printer_section]`. // // rustc internal (active, omit_gdb_pretty_printer_section, "1.5.0", None, None), // Allows attributes on expressions and non-item statements. (active, stmt_expr_attributes, "1.6.0", Some(15701), None), // Allows the use of type ascription in expressions. (active, type_ascription, "1.6.0", Some(23416), None), // Allows `cfg(target_thread_local)`. (active, cfg_target_thread_local, "1.7.0", Some(29594), None), // rustc internal (active, abi_vectorcall, "1.7.0", None, None), // Allows `X..Y` patterns. (active, exclusive_range_pattern, "1.11.0", Some(37854), None), // impl specialization (RFC 1210) (active, specialization, "1.7.0", Some(31844), None), // Allows `cfg(target_has_atomic = "...")`. (active, cfg_target_has_atomic, "1.9.0", Some(32976), None), // The `!` type. Does not imply 'exhaustive_patterns' (below) any more. (active, never_type, "1.13.0", Some(35121), None), // Allows exhaustive pattern matching on types that contain uninhabited types. (active, exhaustive_patterns, "1.13.0", Some(51085), None), // Allows untagged unions `union U { ... }`. (active, untagged_unions, "1.13.0", Some(32836), None), // Used to identify the `compiler_builtins` crate. // // rustc internal. (active, compiler_builtins, "1.13.0", None, None), // Allows `#[link(..., cfg(..))]`. (active, link_cfg, "1.14.0", Some(37406), None), // Allows `extern "ptx-*" fn()`. (active, abi_ptx, "1.15.0", Some(38788), None), // The `repr(i128)` annotation for enums. (active, repr128, "1.16.0", Some(35118), None), // Allows the use of `#[ffi_returns_twice]` on foreign functions. (active, ffi_returns_twice, "1.34.0", Some(58314), None), // The `unadjusted` ABI; perma-unstable. // // rustc internal (active, abi_unadjusted, "1.16.0", None, None), // Declarative macros 2.0 (`macro`). (active, decl_macro, "1.17.0", Some(39412), None), // Allows `#[link(kind="static-nobundle"...)]`. (active, static_nobundle, "1.16.0", Some(37403), None), // Allows `extern "msp430-interrupt" fn()`. (active, abi_msp430_interrupt, "1.16.0", Some(38487), None), // Used to identify crates that contain sanitizer runtimes. 
// // rustc internal (active, sanitizer_runtime, "1.17.0", None, None), // Used to identify crates that contain the profiler runtime. // // rustc internal (active, profiler_runtime, "1.18.0", None, None), // Allows `extern "x86-interrupt" fn()`. (active, abi_x86_interrupt, "1.17.0", Some(40180), None), // Allows the `try {...}` expression. (active, try_blocks, "1.29.0", Some(31436), None), // Allows module-level inline assembly by way of `global_asm!()`. (active, global_asm, "1.18.0", Some(35119), None), // Allows overlapping impls of marker traits. (active, overlapping_marker_traits, "1.18.0", Some(29864), None), // Trait attribute to allow overlapping impls. (active, marker_trait_attr, "1.30.0", Some(29864), None), // rustc internal (active, abi_thiscall, "1.19.0", None, None), // Allows a test to fail without failing the whole suite. (active, allow_fail, "1.19.0", Some(46488), None), // Allows unsized tuple coercion. (active, unsized_tuple_coercion, "1.20.0", Some(42877), None), // Generators (active, generators, "1.21.0", Some(43122), None), // Trait aliases (active, trait_alias, "1.24.0", Some(41517), None), // rustc internal (active, allocator_internals, "1.20.0", None, None), // `#[doc(cfg(...))]` (active, doc_cfg, "1.21.0", Some(43781), None), // `#[doc(masked)]` (active, doc_masked, "1.21.0", Some(44027), None), // `#[doc(spotlight)]` (active, doc_spotlight, "1.22.0", Some(45040), None), // `#[doc(include = "some-file")]` (active, external_doc, "1.22.0", Some(44732), None), // Future-proofing enums/structs with `#[non_exhaustive]` attribute (RFC 2008). (active, non_exhaustive, "1.22.0", Some(44109), None), // Adds `crate` as visibility modifier, synonymous with `pub(crate)`. (active, crate_visibility_modifier, "1.23.0", Some(53120), None), // extern types (active, extern_types, "1.23.0", Some(43467), None), // Allows trait methods with arbitrary self types. (active, arbitrary_self_types, "1.23.0", Some(44874), None), // In-band lifetime bindings (e.g., `fn foo(x: &'a u8) -> &'a u8`). (active, in_band_lifetimes, "1.23.0", Some(44524), None), // Generic associated types (RFC 1598) (active, generic_associated_types, "1.23.0", Some(44265), None), // Infer static outlives requirements (RFC 2093). (active, infer_static_outlives_requirements, "1.26.0", Some(54185), None), // Allows macro invocations in `extern {}` blocks. (active, macros_in_extern, "1.27.0", Some(49476), None), // `existential type` (active, existential_type, "1.28.0", Some(34511), None), // unstable `#[target_feature]` directives (active, arm_target_feature, "1.27.0", Some(44839), None), (active, aarch64_target_feature, "1.27.0", Some(44839), None), (active, hexagon_target_feature, "1.27.0", Some(44839), None), (active, powerpc_target_feature, "1.27.0", Some(44839), None), (active, mips_target_feature, "1.27.0", Some(44839), None), (active, avx512_target_feature, "1.27.0", Some(44839), None), (active, mmx_target_feature, "1.27.0", Some(44839), None), (active, sse4a_target_feature, "1.27.0", Some(44839), None), (active, tbm_target_feature, "1.27.0", Some(44839), None), (active, wasm_target_feature, "1.30.0", Some(44839), None), (active, adx_target_feature, "1.32.0", Some(44839), None), (active, cmpxchg16b_target_feature, "1.32.0", Some(44839), None), (active, movbe_target_feature, "1.34.0", Some(44839), None), // Allows macro invocations on modules expressions and statements and // procedural macros to expand to non-items. 
(active, proc_macro_hygiene, "1.30.0", Some(54727), None), // `#[doc(alias = "...")]` (active, doc_alias, "1.27.0", Some(50146), None), // inconsistent bounds in where clauses (active, trivial_bounds, "1.28.0", Some(48214), None), // `'a: { break 'a; }` (active, label_break_value, "1.28.0", Some(48594), None), // Exhaustive pattern matching on `usize` and `isize`. (active, precise_pointer_size_matching, "1.32.0", Some(56354), None), // `#[doc(keyword = "...")]` (active, doc_keyword, "1.28.0", Some(51315), None), // Allows async and await syntax. (active, async_await, "1.28.0", Some(50547), None), // `#[alloc_error_handler]` (active, alloc_error_handler, "1.29.0", Some(51540), None), (active, abi_amdgpu_kernel, "1.29.0", Some(51575), None), // Added for testing E0705; perma-unstable. (active, test_2018_feature, "1.31.0", Some(0), Some(Edition::Edition2018)), // Allows unsized rvalues at arguments and parameters. (active, unsized_locals, "1.30.0", Some(48055), None), // `#![test_runner]` // `#[test_case]` (active, custom_test_frameworks, "1.30.0", Some(50297), None), // non-builtin attributes in inner attribute position (active, custom_inner_attributes, "1.30.0", Some(54726), None), // Allow mixing of bind-by-move in patterns and references to // those identifiers in guards, *if* we are using MIR-borrowck // (aka NLL). Essentially this means you need to be using the // 2018 edition or later. (active, bind_by_move_pattern_guards, "1.30.0", Some(15287), None), // Allows `impl Trait` in bindings (`let`, `const`, `static`). (active, impl_trait_in_bindings, "1.30.0", Some(34511), None), // Allows `const _: TYPE = VALUE`. (active, underscore_const_names, "1.31.0", Some(54912), None), // Adds `reason` and `expect` lint attributes. (active, lint_reasons, "1.31.0", Some(54503), None), // Allows paths to enum variants on type aliases. (active, type_alias_enum_variants, "1.31.0", Some(49683), None), // Re-Rebalance coherence (active, re_rebalance_coherence, "1.32.0", Some(55437), None), // Const generic types. (active, const_generics, "1.34.0", Some(44580), None), // #[optimize(X)] (active, optimize_attribute, "1.34.0", Some(54882), None), // #[repr(align(X))] on enums (active, repr_align_enum, "1.34.0", Some(57996), None), // Allows the use of C-variadics (active, c_variadic, "1.34.0", Some(44930), None), ); declare_features! ( (removed, import_shadowing, "1.0.0", None, None, None), (removed, managed_boxes, "1.0.0", None, None, None), // Allows use of unary negate on unsigned integers, e.g., -e for e: u8 (removed, negate_unsigned, "1.0.0", Some(29645), None, None), (removed, reflect, "1.0.0", Some(27749), None, None), // A way to temporarily opt out of opt in copy. This will *never* be accepted. 
(removed, opt_out_copy, "1.0.0", None, None, None), (removed, quad_precision_float, "1.0.0", None, None, None), (removed, struct_inherit, "1.0.0", None, None, None), (removed, test_removed_feature, "1.0.0", None, None, None), (removed, visible_private_types, "1.0.0", None, None, None), (removed, unsafe_no_drop_flag, "1.0.0", None, None, None), // Allows using items which are missing stability attributes // rustc internal (removed, unmarked_api, "1.0.0", None, None, None), (removed, pushpop_unsafe, "1.2.0", None, None, None), (removed, allocator, "1.0.0", None, None, None), (removed, simd, "1.0.0", Some(27731), None, Some("removed in favor of `#[repr(simd)]`")), (removed, advanced_slice_patterns, "1.0.0", Some(23121), None, Some("merged into `#![feature(slice_patterns)]`")), (removed, macro_reexport, "1.0.0", Some(29638), None, Some("subsumed by `pub use`")), (removed, proc_macro_mod, "1.27.0", Some(54727), None, Some("subsumed by `#![feature(proc_macro_hygiene)]`")), (removed, proc_macro_expr, "1.27.0", Some(54727), None, Some("subsumed by `#![feature(proc_macro_hygiene)]`")), (removed, proc_macro_non_items, "1.27.0", Some(54727), None, Some("subsumed by `#![feature(proc_macro_hygiene)]`")), (removed, proc_macro_gen, "1.27.0", Some(54727), None, Some("subsumed by `#![feature(proc_macro_hygiene)]`")), (removed, panic_implementation, "1.28.0", Some(44489), None, Some("subsumed by `#[panic_handler]`")), // Allows the use of `#[derive(Anything)]` as sugar for `#[derive_Anything]`. (removed, custom_derive, "1.0.0", Some(29644), None, Some("subsumed by `#[proc_macro_derive]`")), // Paths of the form: `extern::foo::bar` (removed, extern_in_paths, "1.33.0", Some(55600), None, Some("subsumed by `::foo::bar` paths")), (removed, quote, "1.0.0", Some(29601), None, None), ); declare_features! ( (stable_removed, no_stack_check, "1.0.0", None, None), ); declare_features! ( (accepted, associated_types, "1.0.0", None, None), // Allows overloading augmented assignment operations like `a += b`. (accepted, augmented_assignments, "1.8.0", Some(28235), None), // Allows empty structs and enum variants with braces. (accepted, braced_empty_structs, "1.8.0", Some(29720), None), // Allows indexing into constant arrays. (accepted, const_indexing, "1.26.0", Some(29947), None), (accepted, default_type_params, "1.0.0", None, None), (accepted, globs, "1.0.0", None, None), (accepted, if_let, "1.0.0", None, None), // A temporary feature gate used to enable parser extensions needed // to bootstrap fix for #5723. (accepted, issue_5723_bootstrap, "1.0.0", None, None), (accepted, macro_rules, "1.0.0", None, None), // Allows using `#![no_std]`. (accepted, no_std, "1.6.0", None, None), (accepted, slicing_syntax, "1.0.0", None, None), (accepted, struct_variant, "1.0.0", None, None), // These are used to test this portion of the compiler, they don't actually // mean anything. (accepted, test_accepted_feature, "1.0.0", None, None), (accepted, tuple_indexing, "1.0.0", None, None), // Allows macros to appear in the type position. (accepted, type_macros, "1.13.0", Some(27245), None), (accepted, while_let, "1.0.0", None, None), // Allows `#[deprecated]` attribute. (accepted, deprecated, "1.9.0", Some(29935), None), // `expr?` (accepted, question_mark, "1.13.0", Some(31436), None), // Allows `..` in tuple (struct) patterns. (accepted, dotdot_in_tuple_patterns, "1.14.0", Some(33627), None), (accepted, item_like_imports, "1.15.0", Some(35120), None), // Allows using `Self` and associated types in struct expressions and patterns. 
(accepted, more_struct_aliases, "1.16.0", Some(37544), None), // elide `'static` lifetimes in `static`s and `const`s. (accepted, static_in_const, "1.17.0", Some(35897), None), // Allows field shorthands (`x` meaning `x: x`) in struct literal expressions. (accepted, field_init_shorthand, "1.17.0", Some(37340), None), // Allows the definition recursive static items. (accepted, static_recursion, "1.17.0", Some(29719), None), // `pub(restricted)` visibilities (RFC 1422) (accepted, pub_restricted, "1.18.0", Some(32409), None), // `#![windows_subsystem]` (accepted, windows_subsystem, "1.18.0", Some(37499), None), // Allows `break {expr}` with a value inside `loop`s. (accepted, loop_break_value, "1.19.0", Some(37339), None), // Permits numeric fields in struct expressions and patterns. (accepted, relaxed_adts, "1.19.0", Some(35626), None), // Coerces non capturing closures to function pointers. (accepted, closure_to_fn_coercion, "1.19.0", Some(39817), None), // Allows attributes on struct literal fields. (accepted, struct_field_attributes, "1.20.0", Some(38814), None), // Allows the definition of associated constants in `trait` or `impl` blocks. (accepted, associated_consts, "1.20.0", Some(29646), None), // Usage of the `compile_error!` macro. (accepted, compile_error, "1.20.0", Some(40872), None), // See rust-lang/rfcs#1414. Allows code like `let x: &'static u32 = &42` to work. (accepted, rvalue_static_promotion, "1.21.0", Some(38865), None), // Allows `Drop` types in constants (RFC 1440). (accepted, drop_types_in_const, "1.22.0", Some(33156), None), // Allows the sysV64 ABI to be specified on all platforms // instead of just the platforms on which it is the C ABI. (accepted, abi_sysv64, "1.24.0", Some(36167), None), // Allows `repr(align(16))` struct attribute (RFC 1358). (accepted, repr_align, "1.25.0", Some(33626), None), // Allows '|' at beginning of match arms (RFC 1925). (accepted, match_beginning_vert, "1.25.0", Some(44101), None), // Nested groups in `use` (RFC 2128) (accepted, use_nested_groups, "1.25.0", Some(44494), None), // `a..=b` and `..=b` (accepted, inclusive_range_syntax, "1.26.0", Some(28237), None), // Allows `..=` in patterns (RFC 1192). (accepted, dotdoteq_in_patterns, "1.26.0", Some(28237), None), // Termination trait in main (RFC 1937) (accepted, termination_trait, "1.26.0", Some(43301), None), // `Copy`/`Clone` closures (RFC 2132). (accepted, clone_closures, "1.26.0", Some(44490), None), (accepted, copy_closures, "1.26.0", Some(44490), None), // Allows `impl Trait` in function arguments. (accepted, universal_impl_trait, "1.26.0", Some(34511), None), // Allows `impl Trait` in function return types. (accepted, conservative_impl_trait, "1.26.0", Some(34511), None), // The `i128` type (accepted, i128_type, "1.26.0", Some(35118), None), // Default match binding modes (RFC 2005) (accepted, match_default_bindings, "1.26.0", Some(42640), None), // Allows `'_` placeholder lifetimes. (accepted, underscore_lifetimes, "1.26.0", Some(44524), None), // Allows attributes on lifetime/type formal parameters in generics (RFC 1327). (accepted, generic_param_attrs, "1.27.0", Some(48848), None), // Allows `cfg(target_feature = "...")`. (accepted, cfg_target_feature, "1.27.0", Some(29717), None), // Allows `#[target_feature(...)]`. (accepted, target_feature, "1.27.0", None, None), // Trait object syntax with `dyn` prefix (accepted, dyn_trait, "1.27.0", Some(44662), None), // Allows `#[must_use]` on functions, and introduces must-use operators (RFC 1940). 
(accepted, fn_must_use, "1.27.0", Some(43302), None), // Allows use of the `:lifetime` macro fragment specifier. (accepted, macro_lifetime_matcher, "1.27.0", Some(34303), None), // Termination trait in tests (RFC 1937) (accepted, termination_trait_test, "1.27.0", Some(48854), None), // The `#[global_allocator]` attribute (accepted, global_allocator, "1.28.0", Some(27389), None), // Allows `#[repr(transparent)]` attribute on newtype structs. (accepted, repr_transparent, "1.28.0", Some(43036), None), // Procedural macros in `proc-macro` crates (accepted, proc_macro, "1.29.0", Some(38356), None), // `foo.rs` as an alternative to `foo/mod.rs` (accepted, non_modrs_mods, "1.30.0", Some(44660), None), // Allows use of the `:vis` macro fragment specifier (accepted, macro_vis_matcher, "1.30.0", Some(41022), None), // Allows importing and reexporting macros with `use`, // enables macro modularization in general. (accepted, use_extern_macros, "1.30.0", Some(35896), None), // Allows keywords to be escaped for use as identifiers. (accepted, raw_identifiers, "1.30.0", Some(48589), None), // Attributes scoped to tools. (accepted, tool_attributes, "1.30.0", Some(44690), None), // Allows multi-segment paths in attributes and derives. (accepted, proc_macro_path_invoc, "1.30.0", Some(38356), None), // Allows all literals in attribute lists and values of key-value pairs. (accepted, attr_literals, "1.30.0", Some(34981), None), // Infer outlives requirements (RFC 2093). (accepted, infer_outlives_requirements, "1.30.0", Some(44493), None), (accepted, panic_handler, "1.30.0", Some(44489), None), // Used to preserve symbols (see llvm.used). (accepted, used, "1.30.0", Some(40289), None), // `crate` in paths (accepted, crate_in_paths, "1.30.0", Some(45477), None), // Resolve absolute paths as paths from other crates. (accepted, extern_absolute_paths, "1.30.0", Some(44660), None), // Access to crate names passed via `--extern` through prelude. (accepted, extern_prelude, "1.30.0", Some(44660), None), // Parentheses in patterns (accepted, pattern_parentheses, "1.31.0", Some(51087), None), // Allows the definition of `const fn` functions. (accepted, min_const_fn, "1.31.0", Some(53555), None), // Scoped lints (accepted, tool_lints, "1.31.0", Some(44690), None), // `impl<I:Iterator> Iterator for &mut Iterator` // `impl Debug for Foo<'_>` (accepted, impl_header_lifetime_elision, "1.31.0", Some(15872), None), // `extern crate foo as bar;` puts `bar` into extern prelude. (accepted, extern_crate_item_prelude, "1.31.0", Some(55599), None), // Allows use of the `:literal` macro fragment specifier (RFC 1576). (accepted, macro_literal_matcher, "1.32.0", Some(35625), None), // Use `?` as the Kleene "at most one" operator. (accepted, macro_at_most_once_rep, "1.32.0", Some(48075), None), // `Self` struct constructor (RFC 2302) (accepted, self_struct_ctor, "1.32.0", Some(51994), None), // `Self` in type definitions (RFC 2300) (accepted, self_in_typedefs, "1.32.0", Some(49303), None), // Allows `use x::y;` to search `x` in the current scope. (accepted, uniform_paths, "1.32.0", Some(53130), None), // Integer match exhaustiveness checking (RFC 2591) (accepted, exhaustive_integer_patterns, "1.33.0", Some(50907), None), // `use path as _;` and `extern crate c as _;` (accepted, underscore_imports, "1.33.0", Some(48216), None), // Allows `#[repr(packed(N))]` attribute on structs. (accepted, repr_packed, "1.33.0", Some(33158), None), // Allows irrefutable patterns in `if let` and `while let` statements (RFC 2086). 
(accepted, irrefutable_let_patterns, "1.33.0", Some(44495), None), // Allows calling `const unsafe fn` inside `unsafe` blocks in `const fn` functions. (accepted, min_const_unsafe_fn, "1.33.0", Some(55607), None), // Allows let bindings, assignments and destructuring in `const` functions and constants. // As long as control flow is not implemented in const eval, `&&` and `||` may not be used // at the same time as let bindings. (accepted, const_let, "1.33.0", Some(48821), None), // `#[cfg_attr(predicate, multiple, attributes, here)]` (accepted, cfg_attr_multi, "1.33.0", Some(54881), None), // Top level or-patterns (`p | q`) in `if let` and `while let`. (accepted, if_while_or_patterns, "1.33.0", Some(48215), None), // Allows `cfg(target_vendor = "...")`. (accepted, cfg_target_vendor, "1.33.0", Some(29718), None), // `extern crate self as foo;` puts local crate root into extern prelude under name `foo`. (accepted, extern_crate_self, "1.34.0", Some(56409), None), // support for arbitrary delimited token streams in non-macro attributes (accepted, unrestricted_attribute_tokens, "1.34.0", Some(55208), None), ); // If you change this, please modify `src/doc/unstable-book` as well. You must // move that documentation into the relevant place in the other docs, and // remove the chapter on the flag. #[derive(Copy, Clone, PartialEq, Debug)] pub enum AttributeType { /// Normal, builtin attribute that is consumed /// by the compiler before the unused_attribute check Normal, /// Builtin attribute that may not be consumed by the compiler /// before the unused_attribute check. These attributes /// will be ignored by the unused_attribute lint Whitelisted, /// Builtin attribute that is only allowed at the crate level CrateLevel, } pub enum AttributeGate { /// Is gated by a given feature gate, reason /// and function to check if enabled Gated(Stability, &'static str, &'static str, fn(&Features) -> bool), /// Ungated attribute, can be used on all release channels Ungated, } /// A template that the attribute input must match. /// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now. #[derive(Clone, Copy)] pub struct AttributeTemplate { word: bool, list: Option<&'static str>, name_value_str: Option<&'static str>, } impl AttributeTemplate { /// Checks that the given meta-item is compatible with this template. fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool { match meta_item_kind { ast::MetaItemKind::Word => self.word, ast::MetaItemKind::List(..) => self.list.is_some(), ast::MetaItemKind::NameValue(lit) if lit.node.is_str() => self.name_value_str.is_some(), ast::MetaItemKind::NameValue(..) => false, } } } /// A convenience macro for constructing attribute templates. /// E.g., `template!(Word, List: "description")` means that the attribute /// supports forms `#[attr]` and `#[attr(description)]`. macro_rules! 
template { (Word) => { template!(@ true, None, None) }; (List: $descr: expr) => { template!(@ false, Some($descr), None) }; (NameValueStr: $descr: expr) => { template!(@ false, None, Some($descr)) }; (Word, List: $descr: expr) => { template!(@ true, Some($descr), None) }; (Word, NameValueStr: $descr: expr) => { template!(@ true, None, Some($descr)) }; (List: $descr1: expr, NameValueStr: $descr2: expr) => { template!(@ false, Some($descr1), Some($descr2)) }; (Word, List: $descr1: expr, NameValueStr: $descr2: expr) => { template!(@ true, Some($descr1), Some($descr2)) }; (@ $word: expr, $list: expr, $name_value_str: expr) => { AttributeTemplate { word: $word, list: $list, name_value_str: $name_value_str } }; } impl AttributeGate { fn is_deprecated(&self) -> bool { match *self { Gated(Stability::Deprecated(_, _), ..) => true, _ => false, } } } #[derive(Copy, Clone, Debug)] pub enum Stability { Unstable, // First argument is tracking issue link; second argument is an optional // help message, which defaults to "remove this attribute" Deprecated(&'static str, Option<&'static str>), } // fn() is not Debug impl std::fmt::Debug for AttributeGate { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match *self { Gated(ref stab, name, expl, _) => write!(fmt, "Gated({:?}, {}, {})", stab, name, expl), Ungated => write!(fmt, "Ungated") } } } macro_rules! cfg_fn { ($field: ident) => {{ fn f(features: &Features) -> bool { features.$field } f as fn(&Features) -> bool }} } pub fn deprecated_attributes() -> Vec<&'static (&'static str, AttributeType, AttributeTemplate, AttributeGate)> { BUILTIN_ATTRIBUTES.iter().filter(|(.., gate)| gate.is_deprecated()).collect() } pub fn is_builtin_attr_name(name: ast::Name) -> bool { BUILTIN_ATTRIBUTES.iter().any(|&(builtin_name, ..)| name == builtin_name) } pub fn is_builtin_attr(attr: &ast::Attribute) -> bool { BUILTIN_ATTRIBUTES.iter().any(|&(builtin_name, ..)| attr.path == builtin_name) } // Attributes that have a special meaning to rustc or rustdoc pub const BUILTIN_ATTRIBUTES: &[(&str, AttributeType, AttributeTemplate, AttributeGate)] = &[ // Normal attributes ("warn", Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), Ungated), ("allow", Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), Ungated), ("forbid", Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), Ungated), ("deny", Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), Ungated), ("macro_use", Normal, template!(Word, List: "name1, name2, ..."), Ungated), ("macro_export", Normal, template!(Word, List: "local_inner_macros"), Ungated), ("plugin_registrar", Normal, template!(Word), Ungated), ("cfg", Normal, template!(List: "predicate"), Ungated), ("cfg_attr", Normal, template!(List: "predicate, attr1, attr2, ..."), Ungated), ("main", Normal, template!(Word), Ungated), ("start", Normal, template!(Word), Ungated), ("repr", Normal, template!(List: "C, packed, ..."), Ungated), ("path", Normal, template!(NameValueStr: "file"), Ungated), ("automatically_derived", Normal, template!(Word), Ungated), ("no_mangle", Normal, template!(Word), Ungated), ("no_link", Normal, template!(Word), Ungated), ("derive", Normal, template!(List: "Trait1, Trait2, ..."), Ungated), ("should_panic", Normal, template!(Word, List: r#"expected = "reason"#, NameValueStr: "reason"), Ungated), ("ignore", Normal, template!(Word, NameValueStr: "reason"), Ungated), ("no_implicit_prelude", Normal, template!(Word), Ungated), 
("reexport_test_harness_main", Normal, template!(NameValueStr: "name"), Ungated), ("link_args", Normal, template!(NameValueStr: "args"), Gated(Stability::Unstable, "link_args", "the `link_args` attribute is experimental and not \ portable across platforms, it is recommended to \ use `#[link(name = \"foo\")] instead", cfg_fn!(link_args))), ("macro_escape", Normal, template!(Word), Ungated), // RFC #1445. ("structural_match", Whitelisted, template!(Word), Gated(Stability::Unstable, "structural_match", "the semantics of constant patterns is \ not yet settled", cfg_fn!(structural_match))), // RFC #2008 ("non_exhaustive", Whitelisted, template!(Word), Gated(Stability::Unstable, "non_exhaustive", "non exhaustive is an experimental feature", cfg_fn!(non_exhaustive))), // RFC #1268 ("marker", Normal, template!(Word), Gated(Stability::Unstable, "marker_trait_attr", "marker traits is an experimental feature", cfg_fn!(marker_trait_attr))), ("plugin", CrateLevel, template!(List: "name|name(args)"), Gated(Stability::Unstable, "plugin", "compiler plugins are experimental \ and possibly buggy", cfg_fn!(plugin))), ("no_std", CrateLevel, template!(Word), Ungated), ("no_core", CrateLevel, template!(Word), Gated(Stability::Unstable, "no_core", "no_core is experimental", cfg_fn!(no_core))), ("lang", Normal, template!(NameValueStr: "name"), Gated(Stability::Unstable, "lang_items", "language items are subject to change", cfg_fn!(lang_items))), ("linkage", Whitelisted, template!(NameValueStr: "external|internal|..."), Gated(Stability::Unstable, "linkage", "the `linkage` attribute is experimental \ and not portable across platforms", cfg_fn!(linkage))), ("thread_local", Whitelisted, template!(Word), Gated(Stability::Unstable, "thread_local", "`#[thread_local]` is an experimental feature, and does \ not currently handle destructors", cfg_fn!(thread_local))), ("rustc_on_unimplemented", Whitelisted, template!(List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#, NameValueStr: "message"), Gated(Stability::Unstable, "on_unimplemented", "the `#[rustc_on_unimplemented]` attribute \ is an experimental feature", cfg_fn!(on_unimplemented))), ("rustc_const_unstable", Normal, template!(List: r#"feature = "name""#), Gated(Stability::Unstable, "rustc_const_unstable", "the `#[rustc_const_unstable]` attribute \ is an internal feature", cfg_fn!(rustc_const_unstable))), ("global_allocator", Normal, template!(Word), Ungated), ("default_lib_allocator", Whitelisted, template!(Word), Gated(Stability::Unstable, "allocator_internals", "the `#[default_lib_allocator]` \ attribute is an experimental feature", cfg_fn!(allocator_internals))), ("needs_allocator", Normal, template!(Word), Gated(Stability::Unstable, "allocator_internals", "the `#[needs_allocator]` \ attribute is an experimental \ feature", cfg_fn!(allocator_internals))), ("panic_runtime", Whitelisted, template!(Word), Gated(Stability::Unstable, "panic_runtime", "the `#[panic_runtime]` attribute is \ an experimental feature", cfg_fn!(panic_runtime))), ("needs_panic_runtime", Whitelisted, template!(Word), Gated(Stability::Unstable, "needs_panic_runtime", "the `#[needs_panic_runtime]` \ attribute is an experimental \ feature", cfg_fn!(needs_panic_runtime))), ("rustc_outlives", Normal, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_outlives]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_variance", Normal, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the 
`#[rustc_variance]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_layout", Normal, template!(List: "field1, field2, ..."), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_layout]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_layout_scalar_valid_range_start", Whitelisted, template!(List: "value"), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_layout_scalar_valid_range_start]` attribute \ is just used to enable niche optimizations in libcore \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_layout_scalar_valid_range_end", Whitelisted, template!(List: "value"), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_layout_scalar_valid_range_end]` attribute \ is just used to enable niche optimizations in libcore \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_regions", Normal, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_regions]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_error", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_error]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_dump_user_substs", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_if_this_changed", Whitelisted, template!(Word, List: "DepNode"), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_then_this_would_need", Whitelisted, template!(List: "DepNode"), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_dirty", Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_dirty]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_clean", Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_clean]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_partition_reused", Whitelisted, template!(List: r#"cfg = "...", module = "...""#), Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_partition_codegened", Whitelisted, template!(List: r#"cfg = "...", module = "...""#), Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_expected_cgu_reuse", Whitelisted, template!(List: r#"cfg = "...", module = "...", kind = "...""#), Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_synthetic", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_symbol_name", Whitelisted, 
template!(Word), Gated(Stability::Unstable, "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), ("rustc_def_path", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), ("rustc_mir", Whitelisted, template!(List: "arg1, arg2, ..."), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_mir]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_inherit_overflow_checks", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_inherit_overflow_checks]` \ attribute is just used to control \ overflow checking behavior of several \ libcore functions that are inlined \ across crates and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_dump_program_clauses", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_dump_program_clauses]` \ attribute is just used for rustc unit \ tests and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_test_marker", Normal, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_test_marker]` attribute \ is used internally to track tests", cfg_fn!(rustc_attrs))), ("rustc_transparent_macro", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "used internally for testing macro hygiene", cfg_fn!(rustc_attrs))), ("compiler_builtins", Whitelisted, template!(Word), Gated(Stability::Unstable, "compiler_builtins", "the `#[compiler_builtins]` attribute is used to \ identify the `compiler_builtins` crate which \ contains compiler-rt intrinsics and will never be \ stable", cfg_fn!(compiler_builtins))), ("sanitizer_runtime", Whitelisted, template!(Word), Gated(Stability::Unstable, "sanitizer_runtime", "the `#[sanitizer_runtime]` attribute is used to \ identify crates that contain the runtime of a \ sanitizer and will never be stable", cfg_fn!(sanitizer_runtime))), ("profiler_runtime", Whitelisted, template!(Word), Gated(Stability::Unstable, "profiler_runtime", "the `#[profiler_runtime]` attribute is used to \ identify the `profiler_builtins` crate which \ contains the profiler runtime and will never be \ stable", cfg_fn!(profiler_runtime))), ("allow_internal_unstable", Normal, template!(Word, List: "feat1, feat2, ..."), Gated(Stability::Unstable, "allow_internal_unstable", EXPLAIN_ALLOW_INTERNAL_UNSTABLE, cfg_fn!(allow_internal_unstable))), ("allow_internal_unsafe", Normal, template!(Word), Gated(Stability::Unstable, "allow_internal_unsafe", EXPLAIN_ALLOW_INTERNAL_UNSAFE, cfg_fn!(allow_internal_unsafe))), ("fundamental", Whitelisted, template!(Word), Gated(Stability::Unstable, "fundamental", "the `#[fundamental]` attribute \ is an experimental feature", cfg_fn!(fundamental))), ("proc_macro_derive", Normal, template!(List: "TraitName, \ /*opt*/ attributes(name1, name2, ...)"), Ungated), ("rustc_copy_clone_marker", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "internal implementation detail", cfg_fn!(rustc_attrs))), // FIXME: #14408 whitelist docs since rustdoc looks at them ("doc", Whitelisted, template!(List: "hidden|inline|...", NameValueStr: "string"), Ungated), // FIXME: #14406 these are processed in codegen, which happens after the // lint pass ("cold", Whitelisted, template!(Word), Ungated), ("naked", Whitelisted, template!(Word), Gated(Stability::Unstable, "naked_functions", "the `#[naked]` attribute \ is an experimental feature", 
cfg_fn!(naked_functions))), ("ffi_returns_twice", Whitelisted, template!(Word), Gated(Stability::Unstable, "ffi_returns_twice", "the `#[ffi_returns_twice]` attribute \ is an experimental feature", cfg_fn!(ffi_returns_twice))), ("target_feature", Whitelisted, template!(List: r#"enable = "name""#), Ungated), ("export_name", Whitelisted, template!(NameValueStr: "name"), Ungated), ("inline", Whitelisted, template!(Word, List: "always|never"), Ungated), ("link", Whitelisted, template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ cfg = "...""#), Ungated), ("link_name", Whitelisted, template!(NameValueStr: "name"), Ungated), ("link_section", Whitelisted, template!(NameValueStr: "name"), Ungated), ("no_builtins", Whitelisted, template!(Word), Ungated), ("no_mangle", Whitelisted, template!(Word), Ungated), ("no_debug", Whitelisted, template!(Word), Gated( Stability::Deprecated("https://github.com/rust-lang/rust/issues/29721", None), "no_debug", "the `#[no_debug]` attribute was an experimental feature that has been \ deprecated due to lack of demand", cfg_fn!(no_debug))), ("omit_gdb_pretty_printer_section", Whitelisted, template!(Word), Gated(Stability::Unstable, "omit_gdb_pretty_printer_section", "the `#[omit_gdb_pretty_printer_section]` \ attribute is just used for the Rust test \ suite", cfg_fn!(omit_gdb_pretty_printer_section))), ("unsafe_destructor_blind_to_params", Normal, template!(Word), Gated(Stability::Deprecated("https://github.com/rust-lang/rust/issues/34761", Some("replace this attribute with `#[may_dangle]`")), "dropck_parametricity", "unsafe_destructor_blind_to_params has been replaced by \ may_dangle and will be removed in the future", cfg_fn!(dropck_parametricity))), ("may_dangle", Normal, template!(Word), Gated(Stability::Unstable, "dropck_eyepatch", "may_dangle has unstable semantics and may be removed in the future", cfg_fn!(dropck_eyepatch))), ("unwind", Whitelisted, template!(List: "allowed|aborts"), Gated(Stability::Unstable, "unwind_attributes", "#[unwind] is experimental", cfg_fn!(unwind_attributes))), ("used", Whitelisted, template!(Word), Ungated), // used in resolve ("prelude_import", Whitelisted, template!(Word), Gated(Stability::Unstable, "prelude_import", "`#[prelude_import]` is for use by rustc only", cfg_fn!(prelude_import))), // FIXME: #14407 these are only looked at on-demand so we can't // guarantee they'll have already been checked ("rustc_deprecated", Whitelisted, template!(List: r#"since = "version", reason = "...""#), Ungated), ("must_use", Whitelisted, template!(Word, NameValueStr: "reason"), Ungated), ("stable", Whitelisted, template!(List: r#"feature = "name", since = "version""#), Ungated), ("unstable", Whitelisted, template!(List: r#"feature = "name", reason = "...", issue = "N""#), Ungated), ("deprecated", Normal, template!( Word, List: r#"/*opt*/ since = "version", /*opt*/ note = "reason"#, NameValueStr: "reason" ), Ungated ), ("rustc_paren_sugar", Normal, template!(Word), Gated(Stability::Unstable, "unboxed_closures", "unboxed_closures are still evolving", cfg_fn!(unboxed_closures))), ("windows_subsystem", Whitelisted, template!(NameValueStr: "windows|console"), Ungated), ("proc_macro_attribute", Normal, template!(Word), Ungated), ("proc_macro", Normal, template!(Word), Ungated), ("rustc_proc_macro_decls", Normal, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "used internally by rustc", cfg_fn!(rustc_attrs))), ("allow_fail", Normal, template!(Word), Gated(Stability::Unstable, "allow_fail", "allow_fail attribute is 
currently unstable", cfg_fn!(allow_fail))), ("rustc_std_internal_symbol", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "this is an internal attribute that will \ never be stable", cfg_fn!(rustc_attrs))), // whitelists "identity-like" conversion methods to suggest on type mismatch ("rustc_conversion_suggestion", Whitelisted, template!(Word), Gated(Stability::Unstable, "rustc_attrs", "this is an internal attribute that will \ never be stable", cfg_fn!(rustc_attrs))), ("rustc_args_required_const", Whitelisted, template!(List: "N"), Gated(Stability::Unstable, "rustc_attrs", "never will be stable", cfg_fn!(rustc_attrs))), // RFC 2070 ("panic_handler", Normal, template!(Word), Ungated), ("alloc_error_handler", Normal, template!(Word), Gated(Stability::Unstable, "alloc_error_handler", "#[alloc_error_handler] is an unstable feature", cfg_fn!(alloc_error_handler))), // RFC 2412 ("optimize", Whitelisted, template!(List: "size|speed"), Gated(Stability::Unstable, "optimize_attribute", "#[optimize] attribute is an unstable feature", cfg_fn!(optimize_attribute))), // Crate level attributes ("crate_name", CrateLevel, template!(NameValueStr: "name"), Ungated), ("crate_type", CrateLevel, template!(NameValueStr: "bin|lib|..."), Ungated), ("crate_id", CrateLevel, template!(NameValueStr: "ignored"), Ungated), ("feature", CrateLevel, template!(List: "name1, name1, ..."), Ungated), ("no_start", CrateLevel, template!(Word), Ungated), ("no_main", CrateLevel, template!(Word), Ungated), ("no_builtins", CrateLevel, template!(Word), Ungated), ("recursion_limit", CrateLevel, template!(NameValueStr: "N"), Ungated), ("type_length_limit", CrateLevel, template!(NameValueStr: "N"), Ungated), ("test_runner", CrateLevel, template!(List: "path"), Gated(Stability::Unstable, "custom_test_frameworks", EXPLAIN_CUSTOM_TEST_FRAMEWORKS, cfg_fn!(custom_test_frameworks))), ]; // cfg(...)'s that are feature gated const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[ // (name in cfg, feature, function to check if the feature is enabled) ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)), ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)), ("rustdoc", "doc_cfg", cfg_fn!(doc_cfg)), ]; #[derive(Debug)] pub struct GatedCfg { span: Span, index: usize, } impl GatedCfg { pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> { GATED_CFGS.iter() .position(|info| cfg.check_name(info.0)) .map(|idx| { GatedCfg { span: cfg.span, index: idx } }) } pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; if !has_feature(features) && !self.span.allows_unstable(feature) { let explain = format!("`cfg({})` is experimental and subject to change", cfg); emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } } } struct Context<'a> { features: &'a Features, parse_sess: &'a ParseSess, plugin_attributes: &'a [(String, AttributeType)], } macro_rules! gate_feature_fn { ($cx: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $level: expr) => {{ let (cx, has_feature, span, name, explain, level) = ($cx, $has_feature, $span, $name, $explain, $level); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !span.allows_unstable($name) { leveled_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain, level) .emit(); } }} } macro_rules! 
gate_feature { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => { gate_feature_fn!($cx, |x:&Features| x.$feature, $span, stringify!($feature), $explain, GateStrength::Hard) }; ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => { gate_feature_fn!($cx, |x:&Features| x.$feature, $span, stringify!($feature), $explain, $level) }; } impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); let name = attr.name_or_empty(); for &(n, ty, _template, ref gateage) in BUILTIN_ATTRIBUTES { if name == n { if let Gated(_, name, desc, ref has_feature) = *gateage { if !attr.span.allows_unstable(name) { gate_feature_fn!( self, has_feature, attr.span, name, desc, GateStrength::Hard ); } } else if n == "doc" { if let Some(content) = attr.meta_item_list() { if content.iter().any(|c| c.check_name("include")) { gate_feature!(self, external_doc, attr.span, "#[doc(include = \"...\")] is experimental" ); } } } debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage); return; } } for &(ref n, ref ty) in self.plugin_attributes { if attr.path == &**n { // Plugins can't gate attributes, so we don't check for it // unlike the code above; we only use this loop to // short-circuit to avoid the checks below. debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty); return; } } if !attr::is_known(attr) { if name.starts_with("rustc_") { let msg = "unless otherwise specified, attributes with the prefix `rustc_` \ are reserved for internal compiler diagnostics"; gate_feature!(self, rustc_attrs, attr.span, msg); } else if !is_macro { // Only run the custom attribute lint during regular feature gate // checking. Macro gating runs before the plugin attributes are // registered, so we skip this in that case. let msg = format!("The attribute `{}` is currently unknown to the compiler and \ may have meaning added to it in the future", attr.path); gate_feature!(self, custom_attribute, attr.span, &msg); } } } } pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; cx.check_attribute(attr, true); } fn find_lang_feature_issue(feature: &str) -> Option<u32> { if let Some(info) = ACTIVE_FEATURES.iter().find(|t| t.0 == feature) { let issue = info.2; // FIXME (#28244): enforce that active features have issue numbers // assert!(issue.is_some()) issue } else { // search in Accepted, Removed, or Stable Removed features let found = ACCEPTED_FEATURES.iter().chain(REMOVED_FEATURES).chain(STABLE_REMOVED_FEATURES) .find(|t| t.0 == feature); match found { Some(&(_, _, issue, _)) => issue, None => panic!("Feature `{}` is not declared anywhere", feature), } } } pub enum GateIssue { Language, Library(Option<u32>) } #[derive(Debug, Copy, Clone, PartialEq)] pub enum GateStrength { /// A hard error. (Most feature gates should use this.) Hard, /// Only a warning. (Use this only as backwards-compatibility demands.) 
Soft, } pub fn emit_feature_err( sess: &ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str, ) { feature_err(sess, feature, span, issue, explain).emit(); } pub fn feature_err<'a>( sess: &'a ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str, ) -> DiagnosticBuilder<'a> { leveled_feature_err(sess, feature, span, issue, explain, GateStrength::Hard) } fn leveled_feature_err<'a>( sess: &'a ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str, level: GateStrength, ) -> DiagnosticBuilder<'a> { let diag = &sess.span_diagnostic; let issue = match issue { GateIssue::Language => find_lang_feature_issue(feature), GateIssue::Library(lib) => lib, }; let mut err = match level { GateStrength::Hard => { diag.struct_span_err_with_code(span, explain, stringify_error_code!(E0658)) } GateStrength::Soft => diag.struct_span_warn(span, explain), }; match issue { None | Some(0) => {} // We still accept `0` as a stand-in for backwards compatibility Some(n) => { err.note(&format!( "for more information, see https://github.com/rust-lang/rust/issues/{}", n, )); } } // #23973: do not suggest `#![feature(...)]` if we are in beta/stable if sess.unstable_features.is_nightly_build() { err.help(&format!("add #![feature({})] to the crate attributes to enable", feature)); } // If we're on stable and only emitting a "soft" warning, add a note to // clarify that the feature isn't "on" (rather than being on but // warning-worthy). if !sess.unstable_features.is_nightly_build() && level == GateStrength::Soft { err.help("a nightly build of the compiler is required to enable this feature"); } err } const EXPLAIN_BOX_SYNTAX: &str = "box expression syntax is experimental; you can call `Box::new` instead"; pub const EXPLAIN_STMT_ATTR_SYNTAX: &str = "attributes on expressions are experimental"; pub const EXPLAIN_ASM: &str = "inline assembly is not stable enough for use and is subject to change"; pub const EXPLAIN_GLOBAL_ASM: &str = "`global_asm!` is not stable enough for use and is subject to change"; pub const EXPLAIN_CUSTOM_TEST_FRAMEWORKS: &str = "custom test frameworks are an unstable feature"; pub const EXPLAIN_LOG_SYNTAX: &str = "`log_syntax!` is not stable enough for use and is subject to change"; pub const EXPLAIN_CONCAT_IDENTS: &str = "`concat_idents` is not stable enough for use and is subject to change"; pub const EXPLAIN_FORMAT_ARGS_NL: &str = "`format_args_nl` is only for internal language use and is subject to change"; pub const EXPLAIN_TRACE_MACROS: &str = "`trace_macros` is not stable enough for use and is subject to change"; pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &str = "allow_internal_unstable side-steps feature gating and stability checks"; pub const EXPLAIN_ALLOW_INTERNAL_UNSAFE: &str = "allow_internal_unsafe side-steps the unsafe_code lint"; pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &str = "unsized tuple coercion is not stable enough for use and is subject to change"; struct PostExpansionVisitor<'a> { context: &'a Context<'a>, } macro_rules! 
gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ let (cx, span) = ($cx, $span); if !span.allows_unstable(stringify!($feature)) { gate_feature!(cx.context, $feature, span, $explain) } }}; ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {{ let (cx, span) = ($cx, $span); if !span.allows_unstable(stringify!($feature)) { gate_feature!(cx.context, $feature, span, $explain, $level) } }} } impl<'a> PostExpansionVisitor<'a> { fn check_abi(&self, abi: Abi, span: Span) { match abi { Abi::RustIntrinsic => { gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change"); }, Abi::PlatformIntrinsic => { gate_feature_post!(&self, platform_intrinsics, span, "platform intrinsics are experimental and possibly buggy"); }, Abi::Vectorcall => { gate_feature_post!(&self, abi_vectorcall, span, "vectorcall is experimental and subject to change"); }, Abi::Thiscall => { gate_feature_post!(&self, abi_thiscall, span, "thiscall is experimental and subject to change"); }, Abi::RustCall => { gate_feature_post!(&self, unboxed_closures, span, "rust-call ABI is subject to change"); }, Abi::PtxKernel => { gate_feature_post!(&self, abi_ptx, span, "PTX ABIs are experimental and subject to change"); }, Abi::Unadjusted => { gate_feature_post!(&self, abi_unadjusted, span, "unadjusted ABI is an implementation detail and perma-unstable"); }, Abi::Msp430Interrupt => { gate_feature_post!(&self, abi_msp430_interrupt, span, "msp430-interrupt ABI is experimental and subject to change"); }, Abi::X86Interrupt => { gate_feature_post!(&self, abi_x86_interrupt, span, "x86-interrupt ABI is experimental and subject to change"); }, Abi::AmdGpuKernel => { gate_feature_post!(&self, abi_amdgpu_kernel, span, "amdgpu-kernel ABI is experimental and subject to change"); }, // Stable Abi::Cdecl | Abi::Stdcall | Abi::Fastcall | Abi::Aapcs | Abi::Win64 | Abi::SysV64 | Abi::Rust | Abi::C | Abi::System => {} } } fn check_builtin_attribute(&mut self, attr: &ast::Attribute, name: &str, template: AttributeTemplate) { // Some special attributes like `cfg` must be checked // before the generic check, so we skip them here. let should_skip = |name| name == "cfg"; // Some of previously accepted forms were used in practice, // report them as warnings for now. 
let should_warn = |name| name == "doc" || name == "ignore" || name == "inline" || name == "link"; match attr.parse_meta(self.context.parse_sess) { Ok(meta) => if !should_skip(name) && !template.compatible(&meta.node) { let mut msg = "attribute must be of the form ".to_owned(); let mut first = true; if template.word { first = false; msg.push_str(&format!("`#[{}{}]`", name, "")); } if let Some(descr) = template.list { if !first { msg.push_str(" or "); } first = false; msg.push_str(&format!("`#[{}({})]`", name, descr)); } if let Some(descr) = template.name_value_str { if !first { msg.push_str(" or "); } msg.push_str(&format!("`#[{} = \"{}\"]`", name, descr)); } if should_warn(name) { self.context.parse_sess.buffer_lint( BufferedEarlyLintId::IllFormedAttributeInput, meta.span, ast::CRATE_NODE_ID, &msg, ); } else { self.context.parse_sess.span_diagnostic.span_err(meta.span, &msg); } } Err(mut err) => err.emit(), } } } impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { // check for gated attributes self.context.check_attribute(attr, false); if attr.check_name("doc") { if let Some(content) = attr.meta_item_list() { if content.len() == 1 && content[0].check_name("cfg") { gate_feature_post!(&self, doc_cfg, attr.span, "#[doc(cfg(...))] is experimental" ); } else if content.iter().any(|c| c.check_name("masked")) { gate_feature_post!(&self, doc_masked, attr.span, "#[doc(masked)] is experimental" ); } else if content.iter().any(|c| c.check_name("spotlight")) { gate_feature_post!(&self, doc_spotlight, attr.span, "#[doc(spotlight)] is experimental" ); } else if content.iter().any(|c| c.check_name("alias")) { gate_feature_post!(&self, doc_alias, attr.span, "#[doc(alias = \"...\")] is experimental" ); } else if content.iter().any(|c| c.check_name("keyword")) { gate_feature_post!(&self, doc_keyword, attr.span, "#[doc(keyword = \"...\")] is experimental" ); } } } match BUILTIN_ATTRIBUTES.iter().find(|(name, ..)| attr.path == name) { Some(&(name, _, template, _)) => self.check_builtin_attribute(attr, name, template), None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() { // All key-value attributes are restricted to meta-item syntax. attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok(); } } } fn visit_name(&mut self, sp: Span, name: ast::Name) { if !name.as_str().is_ascii() { gate_feature_post!( &self, non_ascii_idents, self.context.parse_sess.source_map().def_span(sp), "non-ascii idents are not fully supported" ); } } fn visit_item(&mut self, i: &'a ast::Item) { match i.node { ast::ItemKind::Const(_,_) => { if i.ident.name == "_" { gate_feature_post!(&self, underscore_const_names, i.span, "naming constants with `_` is unstable"); } } ast::ItemKind::ForeignMod(ref foreign_module) => { self.check_abi(foreign_module.abi, i.span); } ast::ItemKind::Fn(..) => { if attr::contains_name(&i.attrs[..], "plugin_registrar") { gate_feature_post!(&self, plugin_registrar, i.span, "compiler plugins are experimental and possibly buggy"); } if attr::contains_name(&i.attrs[..], "start") { gate_feature_post!(&self, start, i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } if attr::contains_name(&i.attrs[..], "main") { gate_feature_post!(&self, main, i.span, "declaration of a nonstandard #[main] \ function may change over time, for now \ a top-level `fn main()` is required"); } } ast::ItemKind::Struct(..) 
=> { for attr in attr::filter_by_name(&i.attrs[..], "repr") { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name("simd") { gate_feature_post!(&self, repr_simd, attr.span, "SIMD types are experimental and possibly buggy"); } } } } ast::ItemKind::Enum(..) => { for attr in attr::filter_by_name(&i.attrs[..], "repr") { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name("align") { gate_feature_post!(&self, repr_align_enum, attr.span, "`#[repr(align(x))]` on enums is experimental"); } } } } ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => { if polarity == ast::ImplPolarity::Negative { gate_feature_post!(&self, optin_builtin_traits, i.span, "negative trait bounds are not yet fully implemented; \ use marker types for now"); } if let ast::Defaultness::Default = defaultness { gate_feature_post!(&self, specialization, i.span, "specialization is unstable"); } } ast::ItemKind::Trait(ast::IsAuto::Yes, ..) => { gate_feature_post!(&self, optin_builtin_traits, i.span, "auto traits are experimental and possibly buggy"); } ast::ItemKind::TraitAlias(..) => { gate_feature_post!( &self, trait_alias, i.span, "trait aliases are experimental" ); } ast::ItemKind::MacroDef(ast::MacroDef { legacy: false, .. }) => { let msg = "`macro` is experimental"; gate_feature_post!(&self, decl_macro, i.span, msg); } ast::ItemKind::Existential(..) => { gate_feature_post!( &self, existential_type, i.span, "existential types are unstable" ); } _ => {} } visit::walk_item(self, i); } fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) { match i.node { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => { let link_name = attr::first_attr_value_str_by_name(&i.attrs, "link_name"); let links_to_llvm = match link_name { Some(val) => val.as_str().starts_with("llvm."), _ => false }; if links_to_llvm { gate_feature_post!(&self, link_llvm_intrinsics, i.span, "linking to LLVM intrinsics is experimental"); } } ast::ForeignItemKind::Ty => { gate_feature_post!(&self, extern_types, i.span, "extern types are experimental"); } ast::ForeignItemKind::Macro(..) => {} } visit::walk_foreign_item(self, i) } fn visit_ty(&mut self, ty: &'a ast::Ty) { match ty.node { ast::TyKind::BareFn(ref bare_fn_ty) => { self.check_abi(bare_fn_ty.abi, ty.span); } ast::TyKind::Never => { gate_feature_post!(&self, never_type, ty.span, "The `!` type is experimental"); } _ => {} } visit::walk_ty(self, ty) } fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) { if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty { if let ast::TyKind::Never = output_ty.node { // Do nothing } else { self.visit_ty(output_ty) } } } fn visit_expr(&mut self, e: &'a ast::Expr) { match e.node { ast::ExprKind::Box(_) => { gate_feature_post!(&self, box_syntax, e.span, EXPLAIN_BOX_SYNTAX); } ast::ExprKind::Type(..) => { // To avoid noise about type ascription in common syntax errors, only emit if it // is the *only* error. if self.context.parse_sess.span_diagnostic.err_count() == 0 { gate_feature_post!(&self, type_ascription, e.span, "type ascription is experimental"); } } ast::ExprKind::ObsoleteInPlace(..) => { // these get a hard error in ast-validation } ast::ExprKind::Yield(..) 
=> {
                gate_feature_post!(&self, generators, e.span, "yield syntax is experimental");
            }
            ast::ExprKind::TryBlock(_) => {
                gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental");
            }
            ast::ExprKind::Block(_, opt_label) => {
                if let Some(label) = opt_label {
                    gate_feature_post!(&self, label_break_value, label.ident.span,
                                       "labels on blocks are unstable");
                }
            }
            ast::ExprKind::Closure(_, ast::IsAsync::Async { .. }, ..) => {
                gate_feature_post!(&self, async_await, e.span, "async closures are unstable");
            }
            ast::ExprKind::Async(..) => {
                gate_feature_post!(&self, async_await, e.span, "async blocks are unstable");
            }
            _ => {}
        }
        visit::walk_expr(self, e);
    }

    fn visit_arm(&mut self, arm: &'a ast::Arm) {
        visit::walk_arm(self, arm)
    }

    fn visit_pat(&mut self, pattern: &'a ast::Pat) {
        match pattern.node {
            PatKind::Slice(_, Some(ref subslice), _) => {
                gate_feature_post!(&self, slice_patterns,
                                   subslice.span,
                                   "syntax for subslices in slice patterns is not yet stabilized");
            }
            PatKind::Box(..) => {
                gate_feature_post!(&self, box_patterns,
                                   pattern.span,
                                   "box pattern syntax is experimental");
            }
            PatKind::Range(_, _, Spanned { node: RangeEnd::Excluded, .. }) => {
                gate_feature_post!(&self, exclusive_range_pattern, pattern.span,
                                   "exclusive range pattern syntax is experimental");
            }
            _ => {}
        }
        visit::walk_pat(self, pattern)
    }

    fn visit_fn(&mut self,
                fn_kind: FnKind<'a>,
                fn_decl: &'a ast::FnDecl,
                span: Span,
                _node_id: NodeId) {
        match fn_kind {
            FnKind::ItemFn(_, header, _, _) => {
                // Check for const fn and async fn declarations.
                if header.asyncness.node.is_async() {
                    gate_feature_post!(&self, async_await, span, "async fn is unstable");
                }
                if fn_decl.c_variadic {
                    gate_feature_post!(&self, c_variadic, span,
                                       "C-variadic functions are unstable");
                }
                // Stability of const fn methods is covered in
                // `visit_trait_item` and `visit_impl_item` below; this is
                // because default methods don't pass through this point.
                self.check_abi(header.abi, span);
            }
            FnKind::Method(_, sig, _, _) => {
                self.check_abi(sig.header.abi, span);
            }
            _ => {}
        }
        visit::walk_fn(self, fn_kind, fn_decl, span);
    }

    fn visit_generic_param(&mut self, param: &'a GenericParam) {
        if let GenericParamKind::Const { .. } = param.kind {
            gate_feature_post!(&self, const_generics, param.ident.span,
                               "const generics are unstable");
        }
        visit::walk_generic_param(self, param);
    }

    fn visit_trait_item(&mut self, ti: &'a ast::TraitItem) {
        match ti.node {
            ast::TraitItemKind::Method(ref sig, ref block) => {
                if block.is_none() {
                    self.check_abi(sig.header.abi, ti.span);
                }
                if sig.decl.c_variadic {
                    gate_feature_post!(&self, c_variadic, ti.span,
                                       "C-variadic functions are unstable");
                }
                if sig.header.constness.node == ast::Constness::Const {
                    gate_feature_post!(&self, const_fn, ti.span, "const fn is unstable");
                }
            }
            ast::TraitItemKind::Type(_, ref default) => {
                // We use three if statements instead of something like match guards so that all
                // of these errors can be emitted if all cases apply.
if default.is_some() { gate_feature_post!(&self, associated_type_defaults, ti.span, "associated type defaults are unstable"); } if !ti.generics.params.is_empty() { gate_feature_post!(&self, generic_associated_types, ti.span, "generic associated types are unstable"); } if !ti.generics.where_clause.predicates.is_empty() { gate_feature_post!(&self, generic_associated_types, ti.span, "where clauses on associated types are unstable"); } } _ => {} } visit::walk_trait_item(self, ti); } fn visit_impl_item(&mut self, ii: &'a ast::ImplItem) { if ii.defaultness == ast::Defaultness::Default { gate_feature_post!(&self, specialization, ii.span, "specialization is unstable"); } match ii.node { ast::ImplItemKind::Method(..) => {} ast::ImplItemKind::Existential(..) => { gate_feature_post!( &self, existential_type, ii.span, "existential types are unstable" ); } ast::ImplItemKind::Type(_) => { if !ii.generics.params.is_empty() { gate_feature_post!(&self, generic_associated_types, ii.span, "generic associated types are unstable"); } if !ii.generics.where_clause.predicates.is_empty() { gate_feature_post!(&self, generic_associated_types, ii.span, "where clauses on associated types are unstable"); } } _ => {} } visit::walk_impl_item(self, ii); } fn visit_vis(&mut self, vis: &'a ast::Visibility) { if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.node { gate_feature_post!(&self, crate_visibility_modifier, vis.span, "`crate` visibility modifier is experimental"); } visit::walk_vis(self, vis); } } pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], crate_edition: Edition, allow_features: &Option<Vec<String>>) -> Features { fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) { let mut err = struct_span_err!(span_handler, span, E0557, "feature has been removed"); if let Some(reason) = reason { err.span_note(span, reason); } err.emit(); } // Some features are known to be incomplete and using them is likely to have // unanticipated results, such as compiler crashes. We warn the user about these // to alert them. let incomplete_features = ["generic_associated_types", "const_generics"]; let mut features = Features::new(); let mut edition_enabled_features = FxHashMap::default(); for &edition in ALL_EDITIONS { if edition <= crate_edition { // The `crate_edition` implies its respective umbrella feature-gate // (i.e., `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX). edition_enabled_features.insert(Symbol::intern(edition.feature_name()), edition); } } for &(name, .., f_edition, set) in ACTIVE_FEATURES { if let Some(f_edition) = f_edition { if f_edition <= crate_edition { set(&mut features, DUMMY_SP); edition_enabled_features.insert(Symbol::intern(name), crate_edition); } } } // Process the edition umbrella feature-gates first, to ensure // `edition_enabled_features` is completed before it's queried. 
for attr in krate_attrs { if !attr.check_name("feature") { continue } let list = match attr.meta_item_list() { Some(list) => list, None => continue, }; for mi in list { if !mi.is_word() { continue; } let name = mi.name_or_empty(); if incomplete_features.iter().any(|f| name == *f) { span_handler.struct_span_warn( mi.span(), &format!( "the feature `{}` is incomplete and may cause the compiler to crash", name ) ).emit(); } if let Some(edition) = ALL_EDITIONS.iter().find(|e| name == e.feature_name()) { if *edition <= crate_edition { continue; } for &(name, .., f_edition, set) in ACTIVE_FEATURES { if let Some(f_edition) = f_edition { if f_edition <= *edition { // FIXME(Manishearth) there is currently no way to set // lib features by edition set(&mut features, DUMMY_SP); edition_enabled_features.insert(Symbol::intern(name), *edition); } } } } } } for attr in krate_attrs { if !attr.check_name("feature") { continue } let list = match attr.meta_item_list() { Some(list) => list, None => continue, }; for mi in list { let name = match mi.ident() { Some(ident) if mi.is_word() => ident.name, _ => { span_err!(span_handler, mi.span(), E0556, "malformed feature, expected just one word"); continue } }; if let Some(edition) = edition_enabled_features.get(&name) { struct_span_warn!( span_handler, mi.span(), E0705, "the feature `{}` is included in the Rust {} edition", name, edition, ).emit(); continue; } if ALL_EDITIONS.iter().any(|e| name == e.feature_name()) { // Handled in the separate loop above. continue; } if let Some((.., set)) = ACTIVE_FEATURES.iter().find(|f| name == f.0) { if let Some(allowed) = allow_features.as_ref() { if allowed.iter().find(|f| *f == name.as_str()).is_none() { span_err!(span_handler, mi.span(), E0725, "the feature `{}` is not in the list of allowed features", name); continue; } } set(&mut features, mi.span()); features.declared_lang_features.push((name, mi.span(), None)); continue } let removed = REMOVED_FEATURES.iter().find(|f| name == f.0); let stable_removed = STABLE_REMOVED_FEATURES.iter().find(|f| name == f.0); if let Some((.., reason)) = removed.or(stable_removed) { feature_removed(span_handler, mi.span(), *reason); continue } if let Some((_, since, ..)) = ACCEPTED_FEATURES.iter().find(|f| name == f.0) { let since = Some(Symbol::intern(since)); features.declared_lang_features.push((name, mi.span(), since)); continue } features.declared_lib_features.push((name, mi.span())); } } features } pub fn check_crate(krate: &ast::Crate, sess: &ParseSess, features: &Features, plugin_attributes: &[(String, AttributeType)], unstable: UnstableFeatures) { maybe_stage_features(&sess.span_diagnostic, krate, unstable); let ctx = Context { features, parse_sess: sess, plugin_attributes, }; let visitor = &mut PostExpansionVisitor { context: &ctx }; visit::walk_crate(visitor, krate); } #[derive(Clone, Copy, Hash)] pub enum UnstableFeatures { /// Hard errors for unstable features are active, as on beta/stable channels. Disallow, /// Allow features to be activated, as on nightly. Allow, /// Errors are bypassed for bootstrapping. This is required any time /// during the build that feature-related lints are set to warn or above /// because the build turns on warnings-as-errors and uses lots of unstable /// features. As a result, this is always required for building Rust itself. 
Cheat } impl UnstableFeatures { pub fn from_environment() -> UnstableFeatures { // Whether this is a feature-staged build, i.e., on the beta or stable channel let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some(); // Whether we should enable unstable features for bootstrapping let bootstrap = env::var("RUSTC_BOOTSTRAP").is_ok(); match (disable_unstable_features, bootstrap) { (_, true) => UnstableFeatures::Cheat, (true, _) => UnstableFeatures::Disallow, (false, _) => UnstableFeatures::Allow } } pub fn is_nightly_build(&self) -> bool { match *self { UnstableFeatures::Allow | UnstableFeatures::Cheat => true, _ => false, } } } fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) { let allow_features = match unstable { UnstableFeatures::Allow => true, UnstableFeatures::Disallow => false, UnstableFeatures::Cheat => true }; if !allow_features { for attr in &krate.attrs { if attr.check_name("feature") { let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)"); span_err!(span_handler, attr.span, E0554, "#![feature] may not be used on the {} release channel", release_channel); } } } }
// Allows casting raw pointers to `usize` during const eval. (active, const_raw_ptr_to_usize_cast, "1.27.0", Some(51910), None), // Allows dereferencing raw pointers during const eval.
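// A minimal, self-contained sketch of the gating flow the file above
// implements: look up a feature flag and, when it is off for a span, emit an
// E0658-style error with a tracking-issue link and a `#![feature(...)]` hint.
// All names below are illustrative stand-ins, not rustc internals.

struct ExampleFeatures {
    async_await: bool,
}

fn example_gate_feature(enabled: bool, feature: &str, issue: Option<u32>, explain: &str) {
    if enabled {
        return; // feature is active; nothing to diagnose
    }
    // Mirrors the shape of leveled_feature_err: message, issue note, nightly hint.
    eprintln!("error[E0658]: {}", explain);
    if let Some(n) = issue {
        eprintln!("note: for more information, see https://github.com/rust-lang/rust/issues/{}", n);
    }
    eprintln!("help: add #![feature({})] to the crate attributes to enable", feature);
}

fn main() {
    let features = ExampleFeatures { async_await: false };
    example_gate_feature(features.async_await, "async_await", Some(50547),
                         "async blocks are unstable");
}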
_version.py
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by github's download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)

"""Git implementation of _version.py."""

import errno
import os
import re
import subprocess
import sys


def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = ""
    cfg.parentdir_prefix = "None"
    cfg.versionfile_source = "utility_belt/_version.py"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string.
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." 
return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def
(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file.
Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None}
render_pep440_post
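# A quick illustration of the renderers defined in _version.py above: a
# hand-built "pieces" dict for a dirty tree two commits past a 1.2.0 tag
# (the hash is made up). Assumes render_pep440 / render_pep440_post from the
# file are in scope.
pieces = {
    "closest-tag": "1.2.0",   # nearest reachable tag, tag_prefix stripped
    "distance": 2,            # commits since that tag
    "short": "abc1234",       # short hex of HEAD
    "dirty": True,            # uncommitted changes present
    "long": "abc1234deadbeef",
    "error": None,
    "date": None,
}
print(render_pep440(pieces))       # -> 1.2.0+2.gabc1234.dirty
print(render_pep440_post(pieces))  # -> 1.2.0.post2.dev0+gabc1234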
0002_token.py
# Generated by Django 2.1 on 2018-09-08 08:56 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):
dependencies = [ ('authentication', '0001_initial'), ] operations = [ migrations.CreateModel( name='Token', fields=[ ('key', models.CharField(max_length=6, primary_key=True, serialize=False)), ('created_time', models.DateTimeField(auto_now_add=True)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='token', to=settings.AUTH_USER_MODEL)), ], ), ]
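# A hypothetical usage sketch for the model this migration creates (Token and
# some_user are assumed here, not defined in this file): related_name='token'
# exposes the reverse accessor on the user model, and `key` is the
# six-character primary key.
token = Token.objects.create(key="123456", user=some_user)
assert some_user.token.key == "123456"   # reverse OneToOne accessor
assert token.created_time is not None    # populated by auto_now_add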
img.tsx
// Copyright 2021 the Deno authors. All rights reserved. MIT license.

/** @jsx h */
import { h, htmlEntities, removeMarkdown } from "../deps.ts";
import { getLibWithVersion } from "../shared.ts";
import { parseURL } from "../util.ts";

const wrap = (s: string) =>
  s.replace(/(?![^\n]{1,48}$)([^\n]{1,48})\s/g, "$1\n");

/** Renders Deno the denosaur as an SVG. */
function
() { return ( <g id="deno_logo" transform="translate(1022, 455) scale(0.25)"> <mask id="a"> <circle fill="white" cx="256" cy="256" r="230"></circle> </mask> <circle fill="#111827" cx="256" cy="256" r="256"></circle> <path mask="url(#a)" stroke="white" stroke-width="25" stroke-linecap="round" d="M71 319l17-63M107.964 161.095l17-63M36.93 221l17-63M125.964 385l17-63M160.372 486.829l17-63M230 456.329l17-63M206.257 92.587l17-63M326.395 173.004l17-63M452.182 304.693l17-63M409.124 221l17-63M299.027 54.558l17-63M400.624 86.058l17-63" > </path> <path mask="url(#a)" fill="white" stroke="#111827" stroke-width="12" d="M252.225 344.418c-86.65 2.61-144.576-34.5-144.576-94.363 0-61.494 60.33-111.145 138.351-111.145 37.683 0 69.532 10.65 94.392 30.092 21.882 17.113 37.521 40.526 45.519 66.312 2.574 8.301 22.863 83.767 61.112 227.295l1.295 4.86-159.793 74.443-1.101-8.063c-8.85-64.778-16.546-113.338-23.076-145.634-3.237-16.004-6.178-27.96-8.79-35.794-1.227-3.682-2.355-6.361-3.303-7.952a12.56 12.56 0 00-.03-.05z" > </path> <circle mask="url(#a)" fill="#111827" cx="262" cy="203" r="16"></circle> </g> ); } export function IndexCard( { url, description = "" }: { url: string; description?: string }, ) { const lines = wrap( htmlEntities.encode(description).split("\n\n").map((l) => l.replaceAll("\n", " ") ) .join( "\n\n", ), ).split("\n").slice(0, 7); const parsed = parseURL(url); let title; let subtitle; let link; if (parsed) { link = url; title = parsed.module; if (parsed.org) { if (title) { subtitle = `${parsed.org}/${parsed.package}`; } else { title = `${parsed.org}/${parsed.package}`; } } else if (parsed.package) { if (title) { subtitle = parsed.package; } else { title = parsed.package; } } else if (parsed.registry === "deno.land/std") { subtitle = "std"; } } else { [title, subtitle] = getLibWithVersion(url); } return ( <svg width="1200px" height="630px" viewBox="0 0 1200 630" version="1.1" xmlns="http://www.w3.org/2000/svg" > <title>module card</title> <g id="module-card" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" > <rect fill="#FFFFFF" x="0" y="0" width="1200" height="630"></rect> <DenoLogo /> <text id="title" font-family="Inter-Bold, Inter, sans-serif" font-size="88" font-weight="bold" fill="#111827" > <tspan x="60" y="131">{title}</tspan> </text> <text id="subtitle" font-family="Inter-Bold, Inter, sans-serif" font-size="68" font-weight="bold" fill="#111827" > <tspan x="60" y="197">{subtitle}</tspan> </text> <text id="https://deno.land/x/" font-family="Inter-Regular, Inter, sans-serif" font-size="48" font-weight="normal" fill="#111827" > <tspan x="60" y="260">{link}</tspan> </text> {description.length ? 
( <line x1="59.5" y1="291.5" x2="1149.5" y2="291.5" id="Line" stroke="#111827" stroke-width="5" stroke-linecap="square" > </line> ) : undefined} <text id="module_description" fill-rule="nonzero" font-family="JetBrains Mono, monospace" font-size="32" font-weight="normal" fill="#111827" > <tspan x="60" y="346">{lines[0]}</tspan> <tspan x="60" y="385">{lines[1]}</tspan> <tspan x="60" y="424">{lines[2]}</tspan> <tspan x="60" y="463">{lines[3]}</tspan> <tspan x="60" y="502">{lines[4]}</tspan> <tspan x="60" y="541">{lines[5]}</tspan> <tspan x="60" y="580">{lines[6]}</tspan> </text> </g> </svg> ); } export function ModuleCard({ url, doc }: { url: string; doc: string }) { const lines = wrap( htmlEntities.encode(removeMarkdown(doc)).split("\n\n").map((l) => l.replaceAll("\n", " ") ) .join( "\n\n", ), ).split("\n").slice(0, 7); const parsed = parseURL(url); let title; let subtitle; let link; if (parsed) { link = url; title = parsed.module; if (parsed.org) { if (title) { subtitle = `${parsed.org}/${parsed.package}`; } else { title = `${parsed.org}/${parsed.package}`; } } else if (parsed.package) { if (title) { subtitle = parsed.package; } else { title = parsed.package; } } else if (parsed.registry === "deno.land/std") { subtitle = "std"; } } else { [title, subtitle] = getLibWithVersion(url); } return ( <svg width="1200px" height="630px" viewBox="0 0 1200 630" version="1.1" xmlns="http://www.w3.org/2000/svg" > <title>module card</title> <g id="module-card" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" > <rect fill="#FFFFFF" x="0" y="0" width="1200" height="630"></rect> <DenoLogo /> <text id="title" font-family="Inter-Bold, Inter, sans-serif" font-size="88" font-weight="bold" fill="#111827" > <tspan x="60" y="131">{title}</tspan> </text> <text id="subtitle" font-family="Inter-Bold, Inter, sans-serif" font-size="68" font-weight="bold" fill="#111827" > <tspan x="60" y="197">{subtitle}</tspan> </text> <text id="https://deno.land/x/" font-family="Inter-Regular, Inter, sans-serif" font-size="48" font-weight="normal" fill="#111827" > <tspan x="60" y="260">{link}</tspan> </text> {doc.length ? ( <line x1="59.5" y1="291.5" x2="1149.5" y2="291.5" id="Line" stroke="#111827" stroke-width="5" stroke-linecap="square" > </line> ) : undefined} <text id="module_description" fill-rule="nonzero" font-family="JetBrains Mono, monospace" font-size="32" font-weight="normal" fill="#111827" > <tspan x="60" y="346">{lines[0]}</tspan> <tspan x="60" y="385">{lines[1]}</tspan> <tspan x="60" y="424">{lines[2]}</tspan> <tspan x="60" y="463">{lines[3]}</tspan> <tspan x="60" y="502">{lines[4]}</tspan> <tspan x="60" y="541">{lines[5]}</tspan> <tspan x="60" y="580">{lines[6]}</tspan> </text> </g> </svg> ); } export function SymbolCard( { url, item, doc }: { url: string; item: string; doc: string }, ) { const lines = wrap( htmlEntities.encode(removeMarkdown(doc)).split("\n\n").map((l) => l.replaceAll("\n", " ") ).join( "\n\n", ), ).split("\n").slice(0, 7); let link = url; if (url.startsWith("deno")) { const [label, version] = getLibWithVersion(url); link = `${label}${version ? 
` @ ${version}` : ""}`; } return ( <svg width="1200px" height="630px" viewBox="0 0 1200 630" version="1.1" xmlns="http://www.w3.org/2000/svg" > <title>symbol card</title> <g id="symbol-card" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" > <rect fill="#FFFFFF" x="0" y="0" width="1200" height="630"></rect> <DenoLogo /> <text id="item" font-family="Inter-Bold, Inter" font-size="88" font-weight="bold" fill="#111827" > <tspan x="60" y="131">{item}</tspan> </text> <text id="link" font-family="Inter-Regular, Inter" font-size="48" font-weight="normal" fill="#111827" > <tspan x="60" y="199">{link}</tspan> </text> {doc.length ? ( <line x1="59.5" y1="236.5" x2="1149.5" y2="236.5" id="Line" stroke="#111827" stroke-width="5" stroke-linecap="square" > </line> ) : undefined} <text id="symbol-description" font-family="JetBrains Mono, monospace" font-size="32" font-weight="normal" fill="#111827" > <tspan x="60" y="311">{lines[0]}</tspan> <tspan x="60" y="350">{lines[1]}</tspan> <tspan x="60" y="389">{lines[2]}</tspan> <tspan x="60" y="428">{lines[3]}</tspan> <tspan x="60" y="467">{lines[4]}</tspan> <tspan x="60" y="506">{lines[5]}</tspan> <tspan x="60" y="545">{lines[6]}</tspan> <tspan x="60" y="584">{lines[7]}</tspan> </text> </g> </svg> ); }
DenoLogo
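// A standalone check of the `wrap` helper defined at the top of img.tsx: the
// regex (copied from the file) breaks at whitespace so that no emitted line
// exceeds 48 characters, while a short final line is left untouched. The
// sample string is made up.
const wrapCheck = (s: string) =>
  s.replace(/(?![^\n]{1,48}$)([^\n]{1,48})\s/g, "$1\n");

const sample = "word ".repeat(30).trim(); // 149 characters
const lines = wrapCheck(sample).split("\n");
console.log(lines.every((l) => l.length <= 48)); // true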
PackageFilter.py
# Copyright 2020 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Requires Python 2.7+ """Package Filter""" from core.src.bootstrap.Constants import Constants import fnmatch class PackageFilter(object): """implements the Package filtering logic""" def __init__(self, execution_config, composite_logger): self.execution_config = execution_config self.composite_logger = composite_logger # Exclusions - note: version based exclusion is not supported self.global_excluded_packages = self.sanitize_str_to_list(self.execution_config.global_exclusion_list) self.installation_excluded_package_masks = self.execution_config.excluded_package_name_mask_list self.installation_excluded_packages, self.installation_excluded_package_versions = self.get_packages_and_versions_from_masks(self.installation_excluded_package_masks) # Inclusions - note: version based inclusion is optionally supported self.installation_included_package_masks = self.execution_config.included_package_name_mask_list self.installation_included_packages, self.installation_included_package_versions = self.get_packages_and_versions_from_masks(self.installation_included_package_masks) self.installation_included_classifications = [] if self.execution_config.included_classifications_list is None else self.execution_config.included_classifications_list # Neutralize global excluded packages, if customer explicitly includes the package packages_to_clear_from_global = [] for package in self.global_excluded_packages: if self.check_for_explicit_inclusion(package): self.composite_logger.log_debug('Removing package from global exclusion list: ' + package) packages_to_clear_from_global.append(package) self.global_excluded_packages = [x for x in self.global_excluded_packages if x not in packages_to_clear_from_global] # Logging self.composite_logger.log("\nAzure globally-excluded packages: " + str(self.global_excluded_packages)) self.composite_logger.log("Included package classifications: " + ', '.join(self.installation_included_classifications)) self.composite_logger.log("Included packages: " + str(self.installation_included_package_masks)) self.composite_logger.log("Excluded packages: " + str(self.installation_excluded_packages)) if '=' in str(self.installation_excluded_package_masks): self.composite_logger.log_error("\n /!\\ Package exclusions do not support version matching in the filter today. 
" "Due to this, more packages than expected may be excluded from this update deployment.") # region Inclusion / exclusion presence checks def is_exclusion_list_present(self): """Return true if either Global or patch installation specific exclusion list present""" return bool(self.global_excluded_packages) or bool(self.installation_excluded_packages) def is_inclusion_list_present(self): """Return true if patch installation Inclusion is present""" return bool(self.installation_included_packages) # endregion # region Package exclusion checks def check_for_exclusion(self, one_or_more_packages): """Return true if package need to be excluded""" return self.check_for_match(one_or_more_packages, self.installation_excluded_packages) or \ self.check_for_match(one_or_more_packages, self.global_excluded_packages) # endregion # region Package inclusion checks def check_for_inclusion(self, package, package_version=Constants.DEFAULT_UNSPECIFIED_VALUE): """Return true if package should be included (either because no inclusion list is specified, or because of explicit match)""" return not self.is_inclusion_list_present() or self.check_for_explicit_inclusion(package, package_version) def check_for_explicit_inclusion(self, package, package_version=Constants.DEFAULT_UNSPECIFIED_VALUE): """Return true if package should be included due to an explicit match to the inclusion list """ return self.check_for_match(package, self.installation_included_packages, package_version, self.installation_included_package_versions) # endregion # region Inclusion / exclusion common match checker def check_for_match(self, one_or_more_packages, matching_list, linked_package_versions=Constants.DEFAULT_UNSPECIFIED_VALUE, version_matching_list=Constants.DEFAULT_UNSPECIFIED_VALUE):
"""Return true if package(s) (with, optionally, linked version(s)) matches the filter list""" if matching_list: if type(one_or_more_packages) is str: return self.single_package_check_for_match(one_or_more_packages, matching_list, linked_package_versions, version_matching_list) else: for index, each_package in enumerate(one_or_more_packages): if type(linked_package_versions) is str: if self.single_package_check_for_match(each_package, matching_list, linked_package_versions, version_matching_list): return True else: if self.single_package_check_for_match(each_package, matching_list, linked_package_versions[index], version_matching_list): return True return False def single_package_check_for_match(self, package, matching_list, package_version, version_matching_list): """Returns true if a single package (optionally, version) matches the filter list""" for index, matching_package in enumerate(matching_list): if fnmatch.fnmatch(package, matching_package) or fnmatch.fnmatch(self.get_product_name_without_arch(package), matching_package): self.composite_logger.log_debug(' - [Package] {0} matches expression {1}'.format(package, matching_package)) if package_version == Constants.DEFAULT_UNSPECIFIED_VALUE or not version_matching_list or version_matching_list[index] == Constants.DEFAULT_UNSPECIFIED_VALUE: self.composite_logger.log_debug(' - [Version] Check skipped as not specified.') return True elif len(version_matching_list) > index and fnmatch.fnmatch(package_version, version_matching_list[index]): self.composite_logger.log_debug(' - [Version] {0} matches expression {1}'.format(package, version_matching_list[index])) return True elif len(version_matching_list) <= index: # This should never happen - something has gone horribly wrong self.composite_logger.log_error(' - [Version] Index error - ({0} of {1})'.format(index + 1, len(version_matching_list))) else: self.composite_logger.log_debug(' - Package {0} (version={1}) was found, but it did not match filter specified for version ({2})'.format(package, package_version, version_matching_list[index])) return False @staticmethod def get_product_name_without_arch(package_name): """Splits out product name without architecture - if this is changed, review YumPackageManager""" architectures = ['.x86_64', '.noarch', '.i686'] for arch in architectures: if package_name.endswith(arch): return package_name.replace(arch, '') return package_name # endregion # region Get included / excluded package masks def get_packages_and_versions_from_masks(self, package_masks): """Return package names and versions""" packages = [] package_versions = [] if package_masks is not None: for index, package_mask in enumerate(package_masks): package_mask_split = str(package_mask).split('=') if len(package_mask_split) == 1: # no version specified packages.append(package_mask_split[0].strip()) package_versions.append(Constants.DEFAULT_UNSPECIFIED_VALUE) elif len(package_mask_split) == 2: # version also specified packages.append(package_mask_split[0].strip()) package_versions.append(package_mask_split[1].strip()) else: # invalid format self.composite_logger.log_warning("Invalid package format: " + str(package_mask) + " [Ignored]") return packages, package_versions @staticmethod def sanitize_str_to_list(string_input): """Strips excess white-space and converts a comma-separated string to a list""" return [] if (string_input is None) else string_input.strip().split(",") # endregion # region Get installation classifications from execution configuration def 
is_msft_critsec_classification_only(self): return ('Critical' in self.installation_included_classifications or 'Security' in self.installation_included_classifications) and 'Other' not in self.installation_included_classifications def is_msft_other_classification_only(self): return 'Other' in self.installation_included_classifications and not ('Critical' in self.installation_included_classifications or 'Security' in self.installation_included_classifications) def is_msft_all_classification_included(self): """Returns true if all classifications were individually selected *OR* (nothing was selected AND no inclusion list is present) -- business logic""" all_classifications = [key for key in Constants.PackageClassification.__dict__.keys() if not key.startswith('__')] all_classifications_explicitly_selected = bool(len(self.installation_included_classifications) == (len(all_classifications) - 1)) no_classifications_selected = bool(len(self.installation_included_classifications) == 0) only_unclassified_selected = bool('Unclassified' in self.installation_included_classifications and len(self.installation_included_classifications) == 1) return all_classifications_explicitly_selected or ((no_classifications_selected or only_unclassified_selected) and not self.is_inclusion_list_present()) def is_invalid_classification_combination(self): return ('Other' in self.installation_included_classifications and 'Critical' in self.installation_included_classifications and 'Security' not in self.installation_included_classifications) or \ ('Other' in self.installation_included_classifications and 'Security' in self.installation_included_classifications and 'Critical' not in self.installation_included_classifications) # endregion
# type: (str, object, str, object) -> bool # type hinting to remove a warning
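The matching above is plain shell-style wildcarding over package names and, when both are supplied, linked versions. A minimal sketch of the same check in Go, using the standard library's path.Match as a stand-in for Python's fnmatch; the function and sample values below are illustrative, not part of the extension:

package main

import (
	"fmt"
	"path"
)

// matchPackage mirrors the single-package check above: the name must fit
// the wildcard pattern, and the version is only tested when both a concrete
// version and a version pattern are supplied.
func matchPackage(name, version, namePattern, versionPattern string) bool {
	nameOK, _ := path.Match(namePattern, name)
	if !nameOK {
		return false
	}
	if version == "" || versionPattern == "" {
		return true // version check skipped as not specified
	}
	versionOK, _ := path.Match(versionPattern, version)
	return versionOK
}

func main() {
	fmt.Println(matchPackage("bash.x86_64", "4.4.20", "bash*", ""))    // true
	fmt.Println(matchPackage("bash.x86_64", "4.4.20", "bash*", "5.*")) // false
}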
proxy.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package annotations import ( "strings" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "k8s.io/ingress-nginx/test/e2e/framework" ) var _ = framework.IngressNginxDescribe("Annotations - Proxy", func() { f := framework.NewDefaultFramework("proxy") host := "proxy.foo.com" BeforeEach(func() { f.NewEchoDeploymentWithReplicas(2) }) AfterEach(func() { }) It("should set proxy_redirect to off", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-redirect-from": "off", "nginx.ingress.kubernetes.io/proxy-redirect-to": "goodbye.com", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).Should(ContainSubstring("proxy_redirect off;")) }) }) It("should set proxy_redirect to default", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-redirect-from": "default", "nginx.ingress.kubernetes.io/proxy-redirect-to": "goodbye.com", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).Should(ContainSubstring("proxy_redirect default;")) }) }) It("should set proxy_redirect to hello.com goodbye.com", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-redirect-from": "hello.com", "nginx.ingress.kubernetes.io/proxy-redirect-to": "goodbye.com", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).Should(ContainSubstring("proxy_redirect hello.com goodbye.com;")) }) })
} ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).Should(ContainSubstring("client_max_body_size 8m;")) }) }) It("should not set proxy client-max-body-size to incorrect value", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-body-size": "15r", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).ShouldNot(ContainSubstring("client_max_body_size 15r;")) }) }) It("should set valid proxy timeouts", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-connect-timeout": "50", "nginx.ingress.kubernetes.io/proxy-send-timeout": "20", "nginx.ingress.kubernetes.io/proxy-read-timeout": "20", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_connect_timeout 50s;") && strings.Contains(server, "proxy_send_timeout 20s;") && strings.Contains(server, "proxy_read_timeout 20s;") }) }) It("should not set invalid proxy timeouts", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-connect-timeout": "50k", "nginx.ingress.kubernetes.io/proxy-send-timeout": "20k", "nginx.ingress.kubernetes.io/proxy-read-timeout": "20k", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return !strings.Contains(server, "proxy_connect_timeout 50ks;") && !strings.Contains(server, "proxy_send_timeout 20ks;") && !strings.Contains(server, "proxy_read_timeout 20ks;") }) }) It("should turn on proxy-buffering", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-buffering": "on", "nginx.ingress.kubernetes.io/proxy-buffers-number": "8", "nginx.ingress.kubernetes.io/proxy-buffer-size": "8k", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_buffering on;") && strings.Contains(server, "proxy_buffer_size 8k;") && strings.Contains(server, "proxy_buffers 8 8k;") && strings.Contains(server, "proxy_request_buffering on;") }) }) It("should turn off proxy-request-buffering", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-request-buffering": "off", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return Expect(server).Should(ContainSubstring("proxy_request_buffering off;")) }) }) It("should build proxy next upstream", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-next-upstream": "error timeout http_502", "nginx.ingress.kubernetes.io/proxy-next-upstream-timeout": "10", "nginx.ingress.kubernetes.io/proxy-next-upstream-tries": "5", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_next_upstream error timeout http_502;") && strings.Contains(server, "proxy_next_upstream_timeout 10;") && 
strings.Contains(server, "proxy_next_upstream_tries 5;") }) }) It("should build proxy next upstream using configmap values", func() { annotations := map[string]string{} ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.SetNginxConfigMapData(map[string]string{ "proxy-next-upstream": "timeout http_502", "proxy-next-upstream-timeout": "53", "proxy-next-upstream-tries": "44", }) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_next_upstream timeout http_502;") && strings.Contains(server, "proxy_next_upstream_timeout 53;") && strings.Contains(server, "proxy_next_upstream_tries 44;") }) }) It("should setup proxy cookies", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-cookie-domain": "localhost example.org", "nginx.ingress.kubernetes.io/proxy-cookie-path": "/one/ /", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_cookie_domain localhost example.org;") && strings.Contains(server, "proxy_cookie_path /one/ /;") }) }) It("should change the default proxy HTTP version", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-http-version": "1.0", } ing := framework.NewSingleIngress(host, "/", host, f.Namespace, "http-svc", 80, &annotations) f.EnsureIngress(ing) f.WaitForNginxServer(host, func(server string) bool { return strings.Contains(server, "proxy_http_version 1.0;") }) }) })
It("should set proxy client-max-body-size to 8m", func() { annotations := map[string]string{ "nginx.ingress.kubernetes.io/proxy-body-size": "8m",
lib.rs
//! This crate primarily exists to serve the `common/eth2_network_configs` crate, by providing the
//! canonical list of built-in networks and some tooling to help include those configurations in the
//! `lighthouse` binary.
//!
//! It also provides some additional structs which are useful to other components of `lighthouse`
//! (e.g., `Eth2Config`).

use std::env;
use std::path::PathBuf;
use types::{ChainSpec, EthSpecId};

pub use paste::paste;

// A macro is used to define this constant so it can be used with `include_bytes!`.
#[macro_export]
macro_rules! predefined_networks_dir {
    () => {
        "built_in_network_configs"
    };
}

pub const PREDEFINED_NETWORKS_DIR: &str = predefined_networks_dir!();
pub const GENESIS_FILE_NAME: &str = "genesis.ssz";
pub const GENESIS_ZIP_FILE_NAME: &str = "genesis.ssz.zip";

/// The core configuration of a Lighthouse beacon node.
#[derive(Debug, Clone)]
pub struct Eth2Config {
    pub eth_spec_id: EthSpecId,
    pub spec: ChainSpec,
}

impl Default for Eth2Config {
    fn default() -> Self {
        Self {
            eth_spec_id: EthSpecId::Minimal,
            spec: ChainSpec::minimal(),
        }
    }
}

impl Eth2Config {
    pub fn
() -> Self { Self { eth_spec_id: EthSpecId::Mainnet, spec: ChainSpec::mainnet(), } } pub fn minimal() -> Self { Self { eth_spec_id: EthSpecId::Minimal, spec: ChainSpec::minimal(), } } pub fn gnosis() -> Self { Self { eth_spec_id: EthSpecId::Gnosis, spec: ChainSpec::gnosis(), } } } /// A directory that can be built by downloading files via HTTP. /// /// Used by the `eth2_network_config` crate to initialize the network directories during build and /// access them at runtime. #[derive(Copy, Clone, Debug, PartialEq)] pub struct Eth2NetArchiveAndDirectory<'a> { pub name: &'a str, pub unique_id: &'a str, pub genesis_is_known: bool, } impl<'a> Eth2NetArchiveAndDirectory<'a> { /// The directory that should be used to store files downloaded for this net. pub fn dir(&self) -> PathBuf { env::var("CARGO_MANIFEST_DIR") .expect("should know manifest dir") .parse::<PathBuf>() .expect("should parse manifest dir as path") .join(PREDEFINED_NETWORKS_DIR) .join(self.unique_id) } pub fn genesis_state_archive(&self) -> PathBuf { self.dir().join(GENESIS_ZIP_FILE_NAME) } } /// Indicates that the `genesis.ssz.zip` file is present on the filesystem. This means that the /// deposit ceremony has concluded and the final genesis `BeaconState` is known. const GENESIS_STATE_IS_KNOWN: bool = true; #[derive(Copy, Clone, Debug, PartialEq)] pub struct HardcodedNet { pub name: &'static str, pub genesis_is_known: bool, pub config: &'static [u8], pub deploy_block: &'static [u8], pub boot_enr: &'static [u8], pub genesis_state_bytes: &'static [u8], } /// Defines an `Eth2NetArchiveAndDirectory` for some network. /// /// It also defines a `include_<title>_file!` macro which provides a wrapper around /// `std::include_bytes`, allowing the inclusion of bytes from the specific testnet directory. macro_rules! define_archive { ($name_ident: ident, $name_str: tt, $genesis_is_known: ident) => { paste! { #[macro_use] pub mod $name_ident { use super::*; pub const ETH2_NET_DIR: Eth2NetArchiveAndDirectory = Eth2NetArchiveAndDirectory { name: $name_str, unique_id: $name_str, genesis_is_known: $genesis_is_known, }; /// A wrapper around `std::include_bytes` which includes a file from a specific network /// directory. Used by upstream crates to import files at compile time. #[macro_export] macro_rules! [<include_ $name_ident _file>] { ($this_crate: ident, $base_dir: tt, $filename: tt) => { include_bytes!(concat!( $base_dir, "/", $this_crate::predefined_networks_dir!(), "/", $name_str, "/", $filename )) }; } } } }; } /// Creates a `HardcodedNet` definition for some network. #[macro_export] macro_rules! define_net { ($this_crate: ident, $mod: ident, $include_file: tt) => {{ use $this_crate::$mod::ETH2_NET_DIR; $this_crate::HardcodedNet { name: ETH2_NET_DIR.name, genesis_is_known: ETH2_NET_DIR.genesis_is_known, config: $this_crate::$include_file!($this_crate, "../", "config.yaml"), deploy_block: $this_crate::$include_file!($this_crate, "../", "deploy_block.txt"), boot_enr: $this_crate::$include_file!($this_crate, "../", "boot_enr.yaml"), genesis_state_bytes: $this_crate::$include_file!($this_crate, "../", "genesis.ssz"), } }}; } /// Calls `define_net` on a list of networks, and then defines two more lists: /// /// - `HARDCODED_NETS`: a list of all the networks defined by this macro. /// - `HARDCODED_NET_NAMES`: a list of the *names* of the networks defined by this macro. #[macro_export] macro_rules! define_nets { ($this_crate: ident, $($name_ident: ident, $name_str: tt,)+) => { $this_crate::paste! 
{ $( const [<$name_ident:upper>]: $this_crate::HardcodedNet = $this_crate::define_net!($this_crate, $name_ident, [<include_ $name_ident _file>]); )+ const HARDCODED_NETS: &[$this_crate::HardcodedNet] = &[$([<$name_ident:upper>],)+]; pub const HARDCODED_NET_NAMES: &[&'static str] = &[$($name_str,)+]; } }; } /// The canonical macro for defining built-in network configurations. /// /// This macro will provide: /// /// - An `Eth2NetArchiveAndDirectory` for each network. /// - `ETH2_NET_DIRS`: a list of all the above `Eth2NetArchiveAndDirectory`. /// - The `instantiate_hardcoded_nets` macro (see its documentation). /// /// ## Design Justification /// /// Ultimately, this macro serves as a single list of all the networks. The reason it is structured /// in such a complex web-of-macros way is because two requirements of built-in (hard-coded) networks: /// /// 1. We must use `std::include_bytes!` to "bake" arbitrary bytes (genesis states, etc) into the binary. /// 2. We must use a `build.rs` script to decompress the genesis state from a zip file, before we /// can include those bytes. /// /// Because of these two constraints, we must first define all of the networks and the paths to /// their files in this crate. Then, we must use another crate (`eth2_network_configs`) to run a /// `build.rs` which will unzip the genesis states. Then, that `eth2_network_configs` crate can /// perform the final step of using `std::include_bytes` to bake the files (bytes) into the binary. macro_rules! define_hardcoded_nets { ($(($name_ident: ident, $name_str: tt, $genesis_is_known: ident)),+) => { $( define_archive!($name_ident, $name_str, $genesis_is_known); )+ pub const ETH2_NET_DIRS: &[Eth2NetArchiveAndDirectory<'static>] = &[$($name_ident::ETH2_NET_DIR,)+]; /// This macro is designed to be called by an external crate. When called, it will /// define in that external crate: /// /// - A `HardcodedNet` for each network. /// - `HARDCODED_NETS`: a list of all the above `HardcodedNet`. /// - `HARDCODED_NET_NAMES`: a list of all the names of the above `HardcodedNet` (as `&str`). #[macro_export] macro_rules! instantiate_hardcoded_nets { ($this_crate: ident) => { $this_crate::define_nets!($this_crate, $($name_ident, $name_str,)+); } } }; } // Add a new "built-in" network by adding it to the list below. // // The last entry must not end with a comma, otherwise compilation will fail. // // This is the canonical place for defining the built-in network configurations that are present in // the `common/eth2_network_config/built_in_network_configs` directory. // // Each net is defined as a three-tuple: // // 0. The name of the testnet as an "ident" (i.e. something that can be a Rust variable name). // 1. The human-friendly name of the testnet (i.e. usually with "-" instead of "_"). // 2. A bool indicating if the genesis state is known and present as a `genesis.ssz.zip`. // // The directory containing the testnet files should match the human-friendly name (element 1). define_hardcoded_nets!( (mainnet, "mainnet", GENESIS_STATE_IS_KNOWN), (prater, "prater", GENESIS_STATE_IS_KNOWN), (gnosis, "gnosis", GENESIS_STATE_IS_KNOWN), (kiln, "kiln", GENESIS_STATE_IS_KNOWN) );
mainnet
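The design justification above boils down to one requirement: baking arbitrary files into the binary at compile time. For comparison only (this crate is Rust, and macros plus a build script are what solve it there), the same compile-time inclusion in Go is a single go:embed directive; a rough sketch with a hypothetical directory layout mirroring this crate's, covering only the inclusion step, not the zip decompression:

package networks

import "embed"

// Embed every file under the built-in network configs directory into the
// binary, playing the role the include_*_file! macros play above.
//
//go:embed built_in_network_configs
var builtIn embed.FS

// ConfigYAML returns the embedded config.yaml for a named network.
func ConfigYAML(name string) ([]byte, error) {
	return builtIn.ReadFile("built_in_network_configs/" + name + "/config.yaml")
}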
mod.rs
//! Representations of various x86 specific structures and descriptor tables. use crate::VirtAddr; pub mod gdt; // idt needs `feature(abi_x86_interrupt)`, which is not available on stable rust #[cfg(feature = "abi_x86_interrupt")] pub mod idt; pub mod paging; pub mod port; pub mod tss; /// A struct describing a pointer to a descriptor table (GDT / IDT). /// This is in a format suitable for giving to 'lgdt' or 'lidt'. #[derive(Debug, Clone, Copy)] #[repr(C, packed)] pub struct DescriptorTablePointer { /// Size of the DT.
pub limit: u16, /// Pointer to the memory region containing the DT. pub base: VirtAddr, }
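The packed layout matters here: the lgdt/lidt operand is exactly 10 little-endian bytes, a 2-byte limit followed by an 8-byte base. A small Go illustration of assembling that blob by hand (Go has no packed-struct attribute, so encoding/binary is used; the values are placeholders):

package main

import (
	"encoding/binary"
	"fmt"
)

// descriptorTablePointer builds the 10-byte lgdt/lidt operand described
// above: u16 limit, then u64 base, both little-endian.
func descriptorTablePointer(limit uint16, base uint64) [10]byte {
	var buf [10]byte
	binary.LittleEndian.PutUint16(buf[0:2], limit)
	binary.LittleEndian.PutUint64(buf[2:10], base)
	return buf
}

func main() {
	fmt.Printf("% x\n", descriptorTablePointer(0xFFFF, 0x0000_0000_0010_0000))
}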
binarytree.py
""" Definition of TreeNode: class TreeNode: def __init__(self, val): self.val = val self.left, self.right = None, None """ class Solution:
""" @param inorder : A list of integers that inorder traversal of a tree @param postorder : A list of integers that postorder traversal of a tree @return : Root of a tree """ def buildTree(self, inorder, postorder): def genTree(inorder,postorder): if len(inorder)==0: return None root_val = postorder[-1] root = TreeNode(root_val) n = inorder.index(root_val) left_inorder = inorder[:n] left_postorder = postorder[:n] right_inorder = inorder[n+1:] right_postorder= postorder[n:len(postorder)-1] if len(left_inorder) > 0: root.left = genTree(left_inorder, left_postorder) if len(right_inorder) > 0: root.right = genTree(right_inorder, right_postorder) return root root = genTree(inorder, postorder) return root
firstrun.go
//+build !nofirstrun // Copyright 2018. Akamai Technologies, Inc // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "fmt" "os" "path/filepath" "runtime" "strconv" "strings" akamai "github.com/ProcellaTech/cli-common-golang" "github.com/fatih/color" "github.com/kardianos/osext" "github.com/mattn/go-isatty" ) func firstRun() error
func firstRunCheckInPath() (bool, error) {
	selfPath, err := osext.Executable()
	if err != nil {
		return false, err
	}
	os.Args[0] = selfPath
	dirPath := filepath.Dir(selfPath)

	if runtime.GOOS == "windows" {
		dirPath = strings.ToLower(dirPath)
	}

	sysPath := os.Getenv("PATH")
	paths := filepath.SplitList(sysPath)
	inPath := false
	writablePaths := []string{}

	var bannerShown bool
	if getConfigValue("cli", "install-in-path") == "no" {
		inPath = true
		bannerShown = firstRunCheckUpgrade(!inPath)
	}

	if len(paths) == 0 {
		inPath = true
		bannerShown = firstRunCheckUpgrade(!inPath)
	}

	for _, path := range paths {
		if len(strings.TrimSpace(path)) == 0 {
			continue
		}

		if runtime.GOOS == "windows" {
			path = strings.ToLower(path)
		}

		if _, err := os.Stat(path); !os.IsNotExist(err) {
			if err := checkAccess(path, ACCESS_W_OK); err == nil {
				writablePaths = append(writablePaths, path)
			}
		}

		if path == dirPath {
			inPath = true
			bannerShown = firstRunCheckUpgrade(false)
		}
	}

	if !inPath && len(writablePaths) > 0 {
		if !bannerShown {
			showBanner()
			bannerShown = true
		}

		fmt.Fprint(akamai.App.Writer, "Akamai CLI is not installed in your PATH, would you like to install it? [Y/n]: ")
		answer := ""
		fmt.Scanln(&answer)

		if answer != "" && strings.ToLower(answer) != "y" {
			setConfigValue("cli", "install-in-path", "no")
			saveConfig()
			firstRunCheckUpgrade(true)
			return true, nil
		}

		choosePath(writablePaths, answer, selfPath)
	}

	return bannerShown, nil
}

func choosePath(writablePaths []string, answer string, selfPath string) {
	fmt.Fprintln(akamai.App.Writer, color.YellowString("Choose where you would like to install Akamai CLI:"))

	for i, path := range writablePaths {
		fmt.Fprintf(akamai.App.Writer, "(%d) %s\n", i+1, path)
	}

	fmt.Fprint(akamai.App.Writer, "Enter a number: ")
	answer = ""
	fmt.Scanln(&answer)

	index, err := strconv.Atoi(answer)
	if err != nil {
		fmt.Fprintln(akamai.App.Writer, color.RedString("Invalid choice, try again"))
		choosePath(writablePaths, answer, selfPath)
		return // the recursive call handled the retry; don't fall through with a bad index
	}

	if answer == "" || index < 1 || index > len(writablePaths) {
		fmt.Fprintln(akamai.App.Writer, color.RedString("Invalid choice, try again"))
		choosePath(writablePaths, answer, selfPath)
		return
	}

	suffix := ""
	if runtime.GOOS == "windows" {
		suffix = ".exe"
	}

	newPath := filepath.Join(writablePaths[index-1], "akamai"+suffix)
	akamai.StartSpinner(
		"Installing to "+newPath+"...",
		"Installing to "+newPath+"...... ["+color.GreenString("OK")+"]\n",
	)
	err = moveFile(selfPath, newPath)
	os.Args[0] = newPath

	if err != nil {
		akamai.StopSpinnerFail()
		fmt.Fprintln(akamai.App.Writer, color.RedString(err.Error()))
		return // don't report OK after a failed move
	}
	akamai.StopSpinnerOk()
}

func firstRunCheckUpgrade(bannerShown bool) bool {
	if getConfigValue("cli", "last-upgrade-check") == "" {
		if !bannerShown {
			bannerShown = true
			showBanner()
		}
		fmt.Fprint(akamai.App.Writer, "Akamai CLI can auto-update itself, would you like to enable daily checks? [Y/n]: ")
		answer := ""
		fmt.Scanln(&answer)

		if answer != "" && strings.ToLower(answer) != "y" {
			setConfigValue("cli", "last-upgrade-check", "ignore")
			saveConfig()
			return bannerShown
		}

		setConfigValue("cli", "last-upgrade-check", "never")
		saveConfig()
	}

	return bannerShown
}
{ if !isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()) { return nil } bannerShown, err := firstRunCheckInPath() if err != nil { return err } bannerShown = firstRunCheckUpgrade(bannerShown) firstRunCheckStats(bannerShown) return nil }
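choosePath retries invalid input by calling itself; a loop is the more common Go idiom for re-prompting and avoids the recursion entirely. A sketch using this file's own helpers (promptIndex is a hypothetical name, not part of the CLI):

// promptIndex re-prompts until the user enters a number in [1, max].
func promptIndex(max int) int {
	for {
		fmt.Fprint(akamai.App.Writer, "Enter a number: ")
		answer := ""
		fmt.Scanln(&answer)
		index, err := strconv.Atoi(answer)
		if err == nil && index >= 1 && index <= max {
			return index
		}
		fmt.Fprintln(akamai.App.Writer, color.RedString("Invalid choice, try again"))
	}
}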
util.ts
/*! * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ import { PropType } from 'vue' /** * Creates an anonymous class whose constructor automatically applies default values. * * Mostly just used for Vue as the types expect a class. Otherwise one would need to * manually describe constructor parameters and assignments for every field. * * Example (type inferred): * ```ts * export const MyClass = createClass({ * foo: 0, * bar: 'a' as 'a' | 'b', * }) * ``` * * @param defaults Defaults to use during construction. * * @returns Anonymous class. Use `typeof MyClass` to extract its type. */ export function createClass<T>(defaults: T): { new (initial?: Partial<T>): T } export function createClass<T>(defaults: Partial<T>, required: true): { new (initial: T): T } export function
<T>(defaults: T): { new (initial?: Partial<T>): T } | { new (initial: T): T } { return class { constructor(initial: T | Partial<T> = {}) { Object.assign(this, defaults, initial) } } as any } /** * Creates a Vue 'type' from a class. Note that nothing is actually created; a type is only inferred from the `Model`. * * @param Model Model class, usually created from {@link createClass}. * * @returns The {@link Object} class with the typed coerced as a {@link PropType} for the `Model` instance. */ export function createType<T extends new (obj: any) => any>(Model: T): PropType<InstanceType<T>> { return Object }
createClass
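createClass leans on Object.assign to layer caller overrides on top of defaults. The closest everyday Go analogue is a constructor that applies functional options over a defaulted struct; a sketch for comparison only, with illustrative types:

package main

import "fmt"

type Config struct {
	Foo int
	Bar string
}

// Option mutates a Config; New layers options over the defaults, much like
// Object.assign(this, defaults, initial) in createClass above.
type Option func(*Config)

func New(opts ...Option) *Config {
	c := &Config{Foo: 0, Bar: "a"} // defaults
	for _, opt := range opts {
		opt(c)
	}
	return c
}

func main() {
	c := New(func(c *Config) { c.Bar = "b" })
	fmt.Println(c.Foo, c.Bar) // 0 b
}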
start.go
/* Copyright 2020 The cert-manager Authors.
http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package app import ( "context" "fmt" "io" "time" "github.com/go-logr/logr" "github.com/spf13/cobra" "github.com/spf13/pflag" "golang.org/x/sync/errgroup" _ "k8s.io/client-go/plugin/pkg/client/auth" ctrl "sigs.k8s.io/controller-runtime" "github.com/jetstack/cert-manager/pkg/api" "github.com/jetstack/cert-manager/pkg/controller/cainjector" logf "github.com/jetstack/cert-manager/pkg/logs" "github.com/jetstack/cert-manager/pkg/util" ) type InjectorControllerOptions struct { Namespace string LeaderElect bool LeaderElectionNamespace string StdOut io.Writer StdErr io.Writer // logger to be used by this controller log logr.Logger } func (o *InjectorControllerOptions) AddFlags(fs *pflag.FlagSet) { fs.StringVar(&o.Namespace, "namespace", "", ""+ "If set, this limits the scope of cainjector to a single namespace. "+ "If set, cainjector will not update resources with certificates outside of the "+ "configured namespace.") fs.BoolVar(&o.LeaderElect, "leader-elect", true, ""+ "If true, cainjector will perform leader election between instances to ensure no more "+ "than one instance of cainjector operates at a time") fs.StringVar(&o.LeaderElectionNamespace, "leader-election-namespace", "", ""+ "Namespace used to perform leader election (defaults to controller's namespace). "+ "Only used if leader election is enabled") } func NewInjectorControllerOptions(out, errOut io.Writer) *InjectorControllerOptions { o := &InjectorControllerOptions{ StdOut: out, StdErr: errOut, } return o } // NewCommandStartInjectorController is a CLI handler for starting cert-manager func NewCommandStartInjectorController(ctx context.Context, out, errOut io.Writer) *cobra.Command { o := NewInjectorControllerOptions(out, errOut) cmd := &cobra.Command{ Use: "ca-injector", Short: fmt.Sprintf("CA Injection Controller for Kubernetes (%s) (%s)", util.AppVersion, util.AppGitCommit), Long: ` cert-manager CA injector is a Kubernetes addon to automate the injection of CA data into webhooks and APIServices from cert-manager certificates. 
It will ensure that annotated webhooks and API services always have the correct CA data from the referenced certificates, which can then be used to serve API servers and webhook servers.`, // TODO: Refactor this function from this package RunE: func(cmd *cobra.Command, args []string) error { o.log = logf.Log.WithName("ca-injector") logf.V(logf.InfoLevel).InfoS("starting", "version", util.AppVersion, "revision", util.AppGitCommit) return o.RunInjectorController(ctx) }, } flags := cmd.Flags() o.AddFlags(flags) return cmd } func (o InjectorControllerOptions) RunInjectorController(ctx context.Context) error { mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{ Scheme: api.Scheme, Namespace: o.Namespace, LeaderElection: o.LeaderElect, LeaderElectionNamespace: o.LeaderElectionNamespace, LeaderElectionID: "cert-manager-cainjector-leader-election", MetricsBindAddress: "0", }) if err != nil { return fmt.Errorf("error creating manager: %v", err) } g, gctx := errgroup.WithContext(ctx) g.Go(func() (err error) { defer func() { o.log.Error(err, "manager goroutine exited") }() if err = mgr.Start(gctx.Done()); err != nil { return fmt.Errorf("error running manager: %v", err) } return nil }) // Don't launch the controllers unless we have been elected leader <-mgr.Elected() // Exit early if the Elected channel gets closed because we are shutting down. select { case <-gctx.Done(): return g.Wait() default: } // Retry the start up of the certificate based controller in case the // cert-manager CRDs have not been installed yet or in case the CRD API is // not working. E.g. The conversion webhook has not yet had its CA bundle // injected by the secret based controller, which is launched in its own // goroutine. // When shutting down, return the last error if there is one. // Never retry if the controller exits cleanly. g.Go(func() (err error) { for { err = cainjector.RegisterCertificateBased(gctx, mgr) if err == nil { return } o.log.Error(err, "Error registering certificate based controllers. Retrying after 5 seconds.") select { case <-time.After(time.Second * 5): case <-gctx.Done(): return } } }) // Secrets based controller is started in its own goroutine so that it can // perform injection of the CA bundle into any webhooks required by the // cert-manager CRD API. // We do not retry this controller because it only interacts with core APIs // which should always be in a working state. g.Go(func() (err error) { if err = cainjector.RegisterSecretBased(gctx, mgr); err != nil { return fmt.Errorf("error registering secret controller: %v", err) } return }) return g.Wait() }
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
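The comments around the certificate-based controller registration describe a general retry shape: call, wait, stop when the context ends, and surface the last error on shutdown. A distilled sketch of that pattern; retryUntil is a hypothetical helper, not part of cert-manager, and it relies on the same context and time imports used above:

// retryUntil calls fn until it succeeds, waiting between attempts; when the
// context is cancelled it returns the last error, mirroring the loop above.
func retryUntil(ctx context.Context, every time.Duration, fn func() error) error {
	for {
		err := fn()
		if err == nil {
			return nil
		}
		select {
		case <-time.After(every):
		case <-ctx.Done():
			return err
		}
	}
}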
step.py
from dagster import check from .system import SystemStepExecutionContext class StepExecutionContext(object):
__slots__ = ['_system_step_execution_context', '_legacy_context'] def __init__(self, system_step_execution_context): self._system_step_execution_context = check.inst_param( system_step_execution_context, 'system_step_execution_context', SystemStepExecutionContext, ) @property def file_manager(self): return self._system_step_execution_context.file_manager @property def resources(self): return self._system_step_execution_context.resources @property def run_id(self): return self._system_step_execution_context.run_id @property def environment_dict(self): return self._system_step_execution_context.environment_dict @property def pipeline_def(self): return self._system_step_execution_context.pipeline_def @property def mode_def(self): return self._system_step_execution_context.mode_def @property def log(self): return self._system_step_execution_context.log @property def solid_handle(self): return self._system_step_execution_context.solid_handle @property def solid(self): return self._system_step_execution_context.pipeline_def.get_solid(self.solid_handle) @property def solid_def(self): return self._system_step_execution_context.pipeline_def.get_solid( self.solid_handle ).definition def has_tag(self, key): return self._system_step_execution_context.has_tag(key) def get_tag(self, key): return self._system_step_execution_context.get_tag(key) def get_system_context(self): ''' This allows advanced users (e.g. framework authors) to punch through to the underlying system context. ''' return self._system_step_execution_context
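StepExecutionContext is a facade: every property forwards to the wrapped system context. In Go the same narrowing is usually done with a small interface plus struct embedding, which forwards methods without per-field boilerplate; a sketch with illustrative names:

package step

// systemContext is the wide internal surface; StepContext re-exports only
// the subset meant for user code.
type systemContext interface {
	RunID() string
	HasTag(key string) bool
	GetTag(key string) string
}

// StepContext embeds the interface, so the exposed methods are forwarded
// automatically instead of being re-declared one property at a time.
type StepContext struct {
	systemContext
}

// System punches through to the underlying context for framework authors,
// mirroring get_system_context above.
func (s StepContext) System() systemContext { return s.systemContext }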
pool_test.go
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package connection

import (
	"context"
	"errors"
	"net"
	"runtime"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	"github.com/mongodb/mongo-go-driver/x/network/address"
)

func TestPool(t *testing.T) {
	noerr := func(t *testing.T, err error) {
		t.Helper()
		if err != nil {
			t.Errorf("Unexpected error: %v", err)
			t.FailNow()
		}
	}
	t.Run("NewPool", func(t *testing.T) {
		t.Run("should be connected", func(t *testing.T) {
			P, err := NewPool(address.Address(""), 1, 2)
			p := P.(*pool)
			noerr(t, err)
			err = p.Connect(context.Background())
			noerr(t, err)
			if p.connected != connected {
				t.Errorf("Expected new pool to be connected. got %v; want %v", p.connected, connected)
			}
		})
		t.Run("size cannot be larger than capacity", func(t *testing.T) {
			_, err := NewPool(address.Address(""), 5, 1)
			if err != ErrSizeLargerThanCapacity {
				t.Errorf("Should receive error when size is larger than capacity. got %v; want %v", err, ErrSizeLargerThanCapacity)
			}
		})
	})
	t.Run("Disconnect", func(t *testing.T) {
		t.Run("cannot disconnect twice", func(t *testing.T) {
			p, err := NewPool(address.Address(""), 1, 2)
			noerr(t, err)
			err = p.Connect(context.Background())
			noerr(t, err)
			err = p.Disconnect(context.Background())
			noerr(t, err)
			err = p.Disconnect(context.Background())
			if err != ErrPoolDisconnected {
				t.Errorf("Should not be able to call disconnect twice. got %v; want %v", err, ErrPoolDisconnected)
			}
		})
		t.Run("closes idle connections", func(t *testing.T) {
			cleanup := make(chan struct{})
			addr := bootstrapConnections(t, 3, func(nc net.Conn) {
				<-cleanup
				nc.Close()
			})
			d := newdialer(&net.Dialer{})
			p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d }))
			noerr(t, err)
			err = p.Connect(context.Background())
			noerr(t, err)
			conns := [3]Connection{}
			for idx := range [3]struct{}{} {
				conns[idx], _, err = p.Get(context.Background())
				noerr(t, err)
			}
			for idx := range [3]struct{}{} {
				err = conns[idx].Close()
				noerr(t, err)
			}
			if d.lenopened() != 3 {
				t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 3)
			}
			ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
			defer cancel()
			err = p.Disconnect(ctx)
			noerr(t, err)
			if d.lenclosed() != 3 {
				t.Errorf("Should have closed 3 connections, but didn't. got %d; want %d", d.lenclosed(), 3)
			}
			close(cleanup)
			ok := p.(*pool).sem.TryAcquire(int64(p.(*pool).capacity))
			if !ok {
				t.Errorf("clean shutdown should acquire and release semaphore, but semaphore still held")
			} else {
				p.(*pool).sem.Release(int64(p.(*pool).capacity))
			}
		})
		t.Run("closes inflight connections when context expires", func(t *testing.T) {
			cleanup := make(chan struct{})
			addr := bootstrapConnections(t, 3, func(nc net.Conn) {
				<-cleanup
				nc.Close()
			})
			d := newdialer(&net.Dialer{})
			p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d }))
			noerr(t, err)
			err = p.Connect(context.Background())
			noerr(t, err)
			conns := [3]Connection{}
			for idx := range [3]struct{}{} {
				conns[idx], _, err = p.Get(context.Background())
				noerr(t, err)
			}
			for idx := range [2]struct{}{} {
				err = conns[idx].Close()
				noerr(t, err)
			}
			if d.lenopened() != 3 {
				t.Errorf("Should have opened 3 connections, but didn't. 
got %d; want %d", d.lenopened(), 3) } ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) cancel() err = p.Disconnect(ctx) noerr(t, err) if d.lenclosed() != 3 { t.Errorf("Should have closed 3 connections, but didn't. got %d; want %d", d.lenclosed(), 3) } close(cleanup) err = conns[2].Close() noerr(t, err) ok := p.(*pool).sem.TryAcquire(int64(p.(*pool).capacity)) if !ok { t.Errorf("clean shutdown should acquire and release semaphore, but semaphore still held") } else { p.(*pool).sem.Release(int64(p.(*pool).capacity)) } }) t.Run("properly sets the connection state on return", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) c, _, err := p.Get(context.Background()) noerr(t, err) err = c.Close() noerr(t, err) if d.lenopened() != 1 { t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 1) } ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) defer cancel() err = p.Disconnect(ctx) noerr(t, err) if d.lenclosed() != 1 { t.Errorf("Should have closed 3 connections, but didn't. got %d; want %d", d.lenclosed(), 1) } close(cleanup) state := atomic.LoadInt32(&(p.(*pool)).connected) if state != disconnected { t.Errorf("Should have set the connection state on return. got %d; want %d", state, disconnected) } }) }) t.Run("Connect", func(t *testing.T) { t.Run("can reconnect a disconnected pool", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) c, _, err := p.Get(context.Background()) noerr(t, err) gen := c.(*acquired).Connection.(*pooledConnection).generation if gen != 1 { t.Errorf("Connection should have a newer generation. got %d; want %d", gen, 1) } err = c.Close() noerr(t, err) if d.lenopened() != 1 { t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 1) } ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) defer cancel() err = p.Disconnect(ctx) noerr(t, err) if d.lenclosed() != 1 { t.Errorf("Should have closed 3 connections, but didn't. got %d; want %d", d.lenclosed(), 1) } close(cleanup) state := atomic.LoadInt32(&(p.(*pool)).connected) if state != disconnected { t.Errorf("Should have set the connection state on return. got %d; want %d", state, disconnected) } err = p.Connect(context.Background()) noerr(t, err) c, _, err = p.Get(context.Background()) noerr(t, err) gen = atomic.LoadUint64(&(c.(*acquired).Connection.(*pooledConnection)).generation) if gen != 2 { t.Errorf("Connection should have a newer generation. got %d; want %d", gen, 2) } err = c.Close() noerr(t, err) if d.lenopened() != 2 { t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 2) } }) t.Run("cannot connect multiple times without disconnect", func(t *testing.T) { p, err := NewPool(address.Address(""), 3, 3) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) err = p.Connect(context.Background()) if err != ErrPoolConnected { t.Errorf("Shouldn't be able to connect to already connected pool. 
got %v; want %v", err, ErrPoolConnected) } err = p.Connect(context.Background()) if err != ErrPoolConnected { t.Errorf("Shouldn't be able to connect to already connected pool. got %v; want %v", err, ErrPoolConnected) } err = p.Disconnect(context.Background()) noerr(t, err) err = p.Connect(context.Background()) if err != nil { t.Errorf("Should be able to connect to pool after disconnect. got %v; want <nil>", err) } }) t.Run("can disconnect and reconnect multiple times", func(t *testing.T) { p, err := NewPool(address.Address(""), 3, 3) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) err = p.Disconnect(context.Background()) noerr(t, err) err = p.Connect(context.Background()) if err != nil { t.Errorf("Should be able to connect to disconnected pool. got %v; want <nil>", err) } err = p.Disconnect(context.Background()) noerr(t, err) err = p.Connect(context.Background()) if err != nil { t.Errorf("Should be able to connect to disconnected pool. got %v; want <nil>", err) } err = p.Disconnect(context.Background()) noerr(t, err) err = p.Connect(context.Background()) if err != nil { t.Errorf("Should be able to connect to pool after disconnect. got %v; want <nil>", err) } }) }) t.Run("Get", func(t *testing.T) { t.Run("return context error when already cancelled", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) cancel() _, _, err = p.Get(ctx) if err != context.Canceled { t.Errorf("Should return context error when already cancelled. got %v; want %v", err, context.Canceled) } close(cleanup) }) t.Run("return context error when attempting to acquire semaphore", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond) defer cancel() ok := p.(*pool).sem.TryAcquire(3) if !ok { t.Errorf("Could not acquire the entire semaphore.") } _, _, err = p.Get(ctx) if err != context.DeadlineExceeded { t.Errorf("Should return context error when already canclled. got %v; want %v", err, context.DeadlineExceeded) } close(cleanup) }) t.Run("return error when attempting to create new connection", func(t *testing.T) { want := errors.New("create new connection error") var dialer DialerFunc = func(context.Context, string, string) (net.Conn, error) { return nil, want } p, err := NewPool(address.Address(""), 1, 2, WithDialer(func(Dialer) Dialer { return dialer })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) _, _, got := p.Get(context.Background()) if got != want { t.Errorf("Should return error from calling New. 
got %v; want %v", got, want) } }) t.Run("adds connection to inflight pool", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 1, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) defer cancel() c, _, err := p.Get(ctx) noerr(t, err) inflight := len(p.(*pool).inflight) if inflight != 1 { t.Errorf("Incorrect number of inlight connections. got %d; want %d", inflight, 1) } err = c.Close() noerr(t, err) close(cleanup) }) t.Run("closes expired connections", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 2, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool( address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d }), WithIdleTimeout(func(time.Duration) time.Duration { return 10 * time.Millisecond }), ) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) defer cancel() c, _, err := p.Get(ctx) noerr(t, err) if d.lenopened() != 1 { t.Errorf("Should have opened 1 connection, but didn't. got %d; want %d", d.lenopened(), 1) } err = c.Close() noerr(t, err) time.Sleep(15 * time.Millisecond) if d.lenclosed() != 0 { t.Errorf("Should have closed 0 connections, but didn't. got %d; want %d", d.lenopened(), 0) } c, _, err = p.Get(ctx) noerr(t, err) if d.lenopened() != 2 { t.Errorf("Should have opened 2 connections, but didn't. got %d; want %d", d.lenopened(), 2) } time.Sleep(10 * time.Millisecond) if d.lenclosed() != 1 { t.Errorf("Should have closed 1 connection, but didn't. got %d; want %d", d.lenopened(), 1) } close(cleanup) }) t.Run("recycles connections", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) for range [3]struct{}{} { c, _, err := p.Get(context.Background()) noerr(t, err) err = c.Close() noerr(t, err) if d.lenopened() != 1 { t.Errorf("Should have opened 1 connection, but didn't. got %d; want %d", d.lenopened(), 1) } } close(cleanup) }) t.Run("cannot get from disconnected pool", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 3, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithTimeout(context.Background(), 10*time.Microsecond) defer cancel() err = p.Disconnect(ctx) noerr(t, err) _, _, err = p.Get(context.Background()) if err != ErrPoolClosed { t.Errorf("Should get error from disconnected pool. 
got %v; want %v", err, ErrPoolClosed) } close(cleanup) }) t.Run("pool closes excess connections when returned", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 1, 3, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) conns := [3]Connection{} for idx := range [3]struct{}{} { conns[idx], _, err = p.Get(context.Background()) noerr(t, err) } for idx := range [3]struct{}{} { err = conns[idx].Close() noerr(t, err) } if d.lenopened() != 3 { t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 3) } if d.lenclosed() != 2 { t.Errorf("Should have closed 2 connections, but didn't. got %d; want %d", d.lenopened(), 2) } close(cleanup) }) t.Run("cannot get more than capacity connections", func(t *testing.T) {
cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 1, 2, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) conns := [2]Connection{} for idx := range [2]struct{}{} { conns[idx], _, err = p.Get(context.Background()) noerr(t, err) } if d.lenopened() != 2 { t.Errorf("Should have opened 2 connections, but didn't. got %d; want %d", d.lenopened(), 2) } ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() _, _, err = p.Get(ctx) if err != context.DeadlineExceeded { t.Errorf("Should not be able to get more than capacity connections. got %v; want %v", err, context.DeadlineExceeded) } err = conns[0].Close() noerr(t, err) ctx, cancel = context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() c, _, err := p.Get(ctx) noerr(t, err) err = c.Close() noerr(t, err) err = p.Drain() noerr(t, err) err = conns[1].Close() noerr(t, err) ctx, cancel = context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() c, _, err = p.Get(ctx) noerr(t, err) if d.lenopened() != 3 { t.Errorf("Should have opened 3 connections, but didn't. got %d; want %d", d.lenopened(), 3) } close(cleanup) }) t.Run("Cannot starve connection request", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 1, 1, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) conn, _, err := p.Get(context.Background()) if d.lenopened() != 1 { t.Errorf("Should have opened 1 connections, but didn't. 
got %d; want %d", d.lenopened(), 1) } var wg sync.WaitGroup wg.Add(1) ch := make(chan struct{}) go func() { ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) defer cancel() ch <- struct{}{} _, _, err := p.Get(ctx) if err != nil { t.Errorf("Should not be able to starve connection request, but got error: %v", err) } wg.Done() }() <-ch runtime.Gosched() err = conn.Close() noerr(t, err) wg.Wait() close(cleanup) }) t.Run("Does not leak permit from failure to dial connection", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 0, func(nc net.Conn) { <-cleanup nc.Close() }) close(cleanup) want := errors.New("dialing error") p, err := NewPool( address.Address(addr.String()), 1, 2, WithDialer( func(Dialer) Dialer { return DialerFunc(func(ctx context.Context, network, address string) (net.Conn, error) { return nil, want }) }), ) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) _, _, err = p.Get(context.Background()) if err != want { t.Errorf("Expected dial failure but got: %v", err) } ok := p.(*pool).sem.TryAcquire(int64(p.(*pool).capacity)) if !ok { t.Errorf("Dial failure should not leak semaphore permit") } else { p.(*pool).sem.Release(int64(p.(*pool).capacity)) } }) t.Run("Does not leak permit from cancelled context", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 1, func(nc net.Conn) { <-cleanup nc.Close() }) close(cleanup) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 1, 2, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) ctx, cancel := context.WithCancel(context.Background()) cancel() _, _, err = p.Get(ctx) if err != context.Canceled { t.Errorf("Expected context cancelled error. 
got %v; want %v", err, context.Canceled) } ok := p.(*pool).sem.TryAcquire(int64(p.(*pool).capacity)) if !ok { t.Errorf("Canceled context should not leak semaphore permit") } else { p.(*pool).sem.Release(int64(p.(*pool).capacity)) } }) t.Run("Get does not acquire multiple permits", func(t *testing.T) { cleanup := make(chan struct{}) addr := bootstrapConnections(t, 2, func(nc net.Conn) { <-cleanup nc.Close() }) close(cleanup) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 1, 2, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) c, _, err := p.Get(context.Background()) noerr(t, err) err = c.Close() noerr(t, err) err = p.Drain() noerr(t, err) c, _, err = p.Get(context.Background()) noerr(t, err) err = c.Close() noerr(t, err) ok := p.(*pool).sem.TryAcquire(int64(p.(*pool).capacity)) if !ok { t.Errorf("Get should not acquire multiple permits (when expired conn in idle pool)") } else { p.(*pool).sem.Release(int64(p.(*pool).capacity)) } }) }) t.Run("Connection", func(t *testing.T) { t.Run("Connection Close Does Not Error After Pool Is Disconnected", func(t *testing.T) { cleanup := make(chan struct{}) defer close(cleanup) addr := bootstrapConnections(t, 3, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) p, err := NewPool(address.Address(addr.String()), 2, 4, WithDialer(func(Dialer) Dialer { return d })) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) c1, _, err := p.Get(context.Background()) noerr(t, err) ctx, cancel := context.WithCancel(context.Background()) cancel() err = p.Disconnect(ctx) noerr(t, err) err = c1.Close() if err != nil { t.Errorf("Connection Close should not error after Pool is Disconnected, but got error: %v", err) } }) t.Run("Does not return to pool twice", func(t *testing.T) { cleanup := make(chan struct{}) defer close(cleanup) addr := bootstrapConnections(t, 1, func(nc net.Conn) { <-cleanup nc.Close() }) d := newdialer(&net.Dialer{}) P, err := NewPool(address.Address(addr.String()), 2, 4, WithDialer(func(Dialer) Dialer { return d })) p := P.(*pool) noerr(t, err) err = p.Connect(context.Background()) noerr(t, err) c1, _, err := p.Get(context.Background()) noerr(t, err) if len(p.conns) != 0 { t.Errorf("Should be no connections in pool. got %d; want %d", len(p.conns), 0) } err = c1.Close() noerr(t, err) err = c1.Close() noerr(t, err) if len(p.conns) != 1 { t.Errorf("Should not return connection to pool twice. got %d; want %d", len(p.conns), 1) } }) }) }
modelFunc.go
package model

import "github.com/gomodule/redigo/redis"

var RedisPool redis.Pool // redis connection pool

// Get the image captcha code stored under the given uuid
func GetImgCode(uuid string)(string,error){
	// get a connection from the redis pool
	conn := RedisPool.Get()
	defer conn.Close() // return the connection to the pool
	// fetch the value
	return redis.String(conn.Do("get",uuid))
}

// Save the SMS verification code
func SaveSmsCode(phone,vcode string)error{
	// get a connection from the redis pool
	conn := RedisPool.Get()
	defer conn.Close()
	// store the code with a 5-minute TTL
	_,err := conn.Do("setex",phone+"_code",60 * 5,vcode)
	return err
}

// Save the username and password hash to MySQL
func SaveUser(mobile,password_hash string)error{
	// insert the record via gorm
	var user User
	user.Mobile = mobile
	user.Password_hash = password_hash
	user.Name = mobile
	return GlobalDB.Create(&user).Error
}

// Check whether the SMS verification code is correct
func GetSmsCode(phone string)(string,error){
	// get a connection from the redis pool
	conn := RedisPool.Get()
	defer conn.Close()
	// fetch the value
	return redis.String(conn.Do("get",phone+"_code"))
}

// Verify login credentials
func CheckU
"password_hash = ?",pwd_hash).Find(&user).Error return user,err }
ser(mobile,pwd_hash string)(User,error){
	// query the database
	var user User
	err := GlobalDB.Where("mobile = ?",mobile).Where(
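RedisPool is declared above but never initialized in this file. A minimal initialization sketch with the redigo API; the address and pool sizes are placeholders:

package model

import (
	"time"

	"github.com/gomodule/redigo/redis"
)

// InitRedisPool wires up the package-level pool; call it once at startup.
func InitRedisPool(addr string) {
	RedisPool = redis.Pool{
		MaxIdle:     10,                // idle connections kept around
		MaxActive:   100,               // hard cap on open connections
		IdleTimeout: 240 * time.Second, // close connections idle this long
		Dial: func() (redis.Conn, error) {
			return redis.Dial("tcp", addr) // e.g. "127.0.0.1:6379"
		},
	}
}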
committer.go
/* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package generic import ( pb "github.com/hyperledger/fabric-protos-go/peer" "github.com/pkg/errors" "github.com/hyperledger-labs/fabric-smart-client/platform/fabric/driver" ) func (c *channel) Status(txid string) (driver.ValidationCode, []string, error) { vc, err := c.vault.Status(txid) if err != nil { return driver.Unknown, nil, err } if c.externalCommitter == nil { return vc, nil, nil } _, dependantTxIDs, _, err := c.externalCommitter.Status(txid) if err != nil { return driver.Unknown, nil, err } if vc == driver.Unknown && len(dependantTxIDs) != 0 { return driver.HasDependencies, dependantTxIDs, nil } return vc, dependantTxIDs, nil } func (c *channel) ProcessNamespace(nss ...string) error { c.processNamespaces = append(c.processNamespaces, nss...) return nil } func (c *channel) GetProcessNamespace() []string { return c.processNamespaces } func (c *channel) DiscardTx(txid string) error { logger.Debugf("Discarding transaction [%s]", txid) vc, deps, err := c.Status(txid) if err != nil { return errors.WithMessagef(err, "failed getting tx's status in state db [%s]", txid) } if vc == driver.Unknown
c.vault.DiscardTx(txid)
	for _, dep := range deps {
		c.vault.DiscardTx(dep)
	}
	return nil
}

func (c *channel) CommitTX(txid string, block uint64, indexInBlock int, envelope []byte) (err error) {
	logger.Debugf("Committing transaction [%s,%d,%d]", txid, block, indexInBlock)
	defer logger.Debugf("Committing transaction [%s,%d,%d] done [%s]", txid, block, indexInBlock, err)

	vc, deps, err := c.Status(txid)
	if err != nil {
		return errors.WithMessagef(err, "failed getting tx's status in state db [%s]", txid)
	}
	switch vc {
	case driver.Valid:
		// This should generate a panic
		logger.Debugf("[%s] is already valid", txid)
		return errors.Errorf("[%s] is already valid", txid)
	case driver.Invalid:
		// This should generate a panic
		logger.Debugf("[%s] is invalid", txid)
		return errors.Errorf("[%s] is invalid", txid)
	case driver.Unknown:
		return c.commitUnknown(txid, block, indexInBlock)
	case driver.HasDependencies:
		return c.commitDeps(txid, block, indexInBlock)
	case driver.Busy:
		return c.commit(txid, deps, block, indexInBlock, envelope)
	default:
		return errors.Errorf("invalid status code [%d] for [%s]", vc, txid)
	}
}

func (c *channel) commitUnknown(txid string, block uint64, indexInBlock int) error {
	if len(c.processNamespaces) == 0 {
		// This should be ignored
		logger.Debugf("[%s] is unknown and will be ignored", txid)
		return nil
	}

	logger.Debugf("[%s] is unknown but will be processed for known namespaces", txid)

	pt, err := c.GetTransactionByID(txid)
	if err != nil {
		return errors.WithMessagef(err, "failed fetching tx [%s]", txid)
	}
	if !pt.IsValid() {
		return errors.Errorf("fetched tx [%s] should have been valid, instead it is [%s]", txid, pb.TxValidationCode_name[pt.ValidationCode()])
	}

	rws, err := c.GetRWSet(txid, pt.Results())
	if err != nil {
		return errors.WithMessagef(err, "failed getting rwset for tx [%s]", txid)
	}
	found := false
	logger.Debugf("[%s] contains namespaces [%v]", txid, rws.Namespaces())
	for _, ns := range rws.Namespaces() {
		for _, pns := range c.processNamespaces {
			if ns == pns {
				found = true
				break
			}
		}
		if found {
			break
		}
	}
	rws.Done()

	if !found {
		logger.Debugf("[%s] no known namespaces found", txid)
		// nothing to commit
		return nil
	}

	// commit this transaction anyway, because it contains one of the namespaces to be processed
	logger.Debugf("[%s] known namespaces found, commit", txid)
	return c.commit(txid, nil, block, indexInBlock, nil)
}

func (c *channel) commit(txid string, deps []string, block uint64, indexInBlock int, envelope []byte) error {
	logger.Debugf("[%s] is known.", txid)

	switch {
	case len(deps) != 0:
		if err := c.commitExternal(txid, block, indexInBlock); err != nil {
			return err
		}
	default:
		if err := c.commitLocal(txid, block, indexInBlock, envelope); err != nil {
			return err
		}
	}
	return nil
}

func (c *channel) commitDeps(txid string, block uint64, indexInBlock int) error {
	// This should not generate a panic if the transaction is deemed invalid
	logger.Debugf("[%s] is unknown but has dependencies, commit as multi-shard pvt", txid)

	// Validate and commit
	vc, err := c.externalCommitter.Validate(txid)
	if err != nil {
		return errors.WithMessagef(err, "failed validating transaction [%s]", txid)
	}
	switch vc {
	case driver.Valid:
		if err := c.externalCommitter.CommitTX(txid, block, indexInBlock); err != nil {
			return errors.WithMessagef(err, "failed committing tx [%s]", txid)
		}
		return nil
	case driver.Invalid:
		if err := c.externalCommitter.DiscardTX(txid); err != nil {
			logger.Errorf("failed discarding tx [%s] with err [%s]", txid, err)
		}
		return nil
	}
	return nil
}

func (c *channel) commitExternal(txid string, 
block uint64, indexInBlock int) error { logger.Debugf("[%s] Committing as multi-shard pvt.", txid) // Ask for finality _, _, parties, err := c.externalCommitter.Status(txid) if err != nil { return errors.Wrapf(err, "failed getting parties for [%s]", txid) } if err := c.IsFinalForParties(txid, parties...); err != nil { return err } // Validate and commit vc, err := c.externalCommitter.Validate(txid) if err != nil { return errors.WithMessagef(err, "failed validating transaction [%s]", txid) } switch vc { case driver.Valid: if err := c.externalCommitter.CommitTX(txid, block, indexInBlock); err != nil { return errors.WithMessagef(err, "failed committing tx [%s]", txid) } return nil case driver.Invalid: if err := c.externalCommitter.DiscardTX(txid); err != nil { logger.Errorf("failed committing tx [%s] with err [%s]", txid, err) } return nil } return nil } func (c *channel) commitLocal(txid string, block uint64, indexInBlock int, envelope []byte) error { // This is a normal transaction, validated by Fabric. // Commit it cause Fabric says it is valid. logger.Debugf("[%s] Committing", txid) // Match rwsets if envelope is not empty if len(envelope) != 0 { pt, err := newProcessedTransactionFromEnvelopeRaw(envelope) if err != nil { return err } if err := c.vault.Match(txid, pt.Results()); err != nil { return err } } // Post-Processes logger.Debugf("[%s] Post Processes", txid) if err := c.postProcessTx(txid); err != nil { // This should generate a panic return err } // Commit logger.Debugf("[%s] Commit in vault", txid) if err := c.vault.CommitTX(txid, block, indexInBlock); err != nil { // This should generate a panic return err } return nil } func (c *channel) postProcessTx(txid string) error { if err := c.network.ProcessorManager().ProcessByID(c.name, txid); err != nil { // This should generate a panic return err } return nil }
{ return nil }
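The CommitTX switch above routes a transaction by the vault's validation code: the terminal states (Valid, Invalid) are errors, Unknown goes through the namespace filter, HasDependencies through the external committer, and Busy through the normal commit path. Below is a minimal, self-contained Go sketch of that dispatch; ValidationCode, its constants, and dispatch are illustrative stand-ins, not the fabric-smart-client driver API.

// validation_flow_sketch.go - toy illustration of the dispatch in (*channel).CommitTX
package main

import (
	"errors"
	"fmt"
)

type ValidationCode int

const (
	Unknown         ValidationCode = iota // this peer has never seen the tx
	Busy                                  // the tx is known but not yet finalized
	Valid                                 // already committed as valid
	Invalid                               // already discarded as invalid
	HasDependencies                       // waiting on other (multi-shard) txs
)

// dispatch mirrors the switch above: terminal states are errors,
// the remaining states route to the matching commit path.
func dispatch(vc ValidationCode, txid string) (string, error) {
	switch vc {
	case Valid:
		return "", fmt.Errorf("[%s] is already valid", txid)
	case Invalid:
		return "", fmt.Errorf("[%s] is invalid", txid)
	case Unknown:
		return "commitUnknown", nil // commit only if it touches processed namespaces
	case HasDependencies:
		return "commitDeps", nil // validate via the external committer first
	case Busy:
		return "commit", nil // normal local (or external) commit
	default:
		return "", errors.New("invalid status code")
	}
}

func main() {
	for _, vc := range []ValidationCode{Unknown, Busy, Valid, Invalid, HasDependencies} {
		path, err := dispatch(vc, "tx0")
		fmt.Printf("vc=%d -> path=%q err=%v\n", vc, path, err)
	}
}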
defaults.go
package defaults

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/sirupsen/logrus"

	coreapi "k8s.io/api/core/v1"
	kapierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/util/sets"
	coreclientset "k8s.io/client-go/kubernetes/typed/core/v1"
	"k8s.io/client-go/rest"
	ctrlruntimeclient "sigs.k8s.io/controller-runtime/pkg/client"
	"sigs.k8s.io/yaml"

	"github.com/openshift/api/image/docker10"
	imagev1 "github.com/openshift/api/image/v1"
	templateapi "github.com/openshift/api/template/v1"
	buildclientset "github.com/openshift/client-go/build/clientset/versioned/typed/build/v1"
	templateclientset "github.com/openshift/client-go/template/clientset/versioned/typed/template/v1"

	"github.com/openshift/ci-tools/pkg/api"
	"github.com/openshift/ci-tools/pkg/lease"
	"github.com/openshift/ci-tools/pkg/release"
	"github.com/openshift/ci-tools/pkg/release/candidate"
	"github.com/openshift/ci-tools/pkg/release/official"
	"github.com/openshift/ci-tools/pkg/release/prerelease"
	"github.com/openshift/ci-tools/pkg/results"
	"github.com/openshift/ci-tools/pkg/secrets"
	"github.com/openshift/ci-tools/pkg/steps"
	"github.com/openshift/ci-tools/pkg/steps/clusterinstall"
	"github.com/openshift/ci-tools/pkg/steps/loggingclient"
	releasesteps "github.com/openshift/ci-tools/pkg/steps/release"
	"github.com/openshift/ci-tools/pkg/steps/secretrecordingclient"
	"github.com/openshift/ci-tools/pkg/steps/utils"
)

type inputImageSet map[api.InputImage]struct{}

// FromConfig interprets the human-friendly fields in
// the release build configuration and generates steps for
// them, returning the full set of steps required for the
// build, including defaulted steps, generated steps and
// all raw steps that the user provided.
func FromConfig(
	ctx context.Context,
	config *api.ReleaseBuildConfiguration,
	jobSpec *api.JobSpec,
	templates []*templateapi.Template,
	paramFile string,
	promote bool,
	clusterConfig *rest.Config,
	leaseClient *lease.Client,
	requiredTargets []string,
	cloneAuthConfig *steps.CloneAuthConfig,
	pullSecret, pushSecret *coreapi.Secret,
	censor *secrets.DynamicCensor,
	hiveKubeconfig *rest.Config,
) ([]api.Step, []api.Step, error) {
	crclient, err := ctrlruntimeclient.NewWithWatch(clusterConfig, ctrlruntimeclient.Options{})
	// check the construction error before wrapping the client
	if err != nil {
		return nil, nil, fmt.Errorf("failed to construct client: %w", err)
	}
	crclient = secretrecordingclient.Wrap(crclient, censor)
	client := loggingclient.New(crclient)
	buildGetter, err := buildclientset.NewForConfig(clusterConfig)
	if err != nil {
		return nil, nil, fmt.Errorf("could not get build client for cluster config: %w", err)
	}
	buildClient := steps.NewBuildClient(client, buildGetter.RESTClient())
	templateGetter, err := templateclientset.NewForConfig(clusterConfig)
	if err != nil {
		return nil, nil, fmt.Errorf("could not get template client for cluster config: %w", err)
	}
	templateClient := steps.NewTemplateClient(client, templateGetter.RESTClient())
	coreGetter, err := coreclientset.NewForConfig(clusterConfig)
	if err != nil {
		return nil, nil, fmt.Errorf("could not get core client for cluster config: %w", err)
	}
	podClient := steps.NewPodClient(client, clusterConfig, coreGetter.RESTClient())
	var hiveClient ctrlruntimeclient.Client
	if hiveKubeconfig != nil {
		hiveClient, err = ctrlruntimeclient.New(hiveKubeconfig, ctrlruntimeclient.Options{})
		if err != nil {
			return nil, nil, fmt.Errorf("could not get Hive client for Hive kube config: %w", err)
		}
	}
	return fromConfig(ctx, config, jobSpec, templates, paramFile, promote, client, buildClient, templateClient,
podClient, leaseClient, hiveClient, &http.Client{}, requiredTargets, cloneAuthConfig, pullSecret, pushSecret, api.NewDeferredParameters(nil)) } func fromConfig( ctx context.Context, config *api.ReleaseBuildConfiguration, jobSpec *api.JobSpec, templates []*templateapi.Template, paramFile string, promote bool, client loggingclient.LoggingClient, buildClient steps.BuildClient, templateClient steps.TemplateClient, podClient steps.PodClient, leaseClient *lease.Client, hiveClient ctrlruntimeclient.Client, httpClient release.HTTPClient, requiredTargets []string, cloneAuthConfig *steps.CloneAuthConfig, pullSecret, pushSecret *coreapi.Secret, params *api.DeferredParameters, ) ([]api.Step, []api.Step, error) { requiredNames := sets.NewString() for _, target := range requiredTargets { requiredNames.Insert(target) } params.Add("JOB_NAME", func() (string, error) { return jobSpec.Job, nil }) params.Add("JOB_NAME_HASH", func() (string, error) { return jobSpec.JobNameHash(), nil }) params.Add("JOB_NAME_SAFE", func() (string, error) { return strings.Replace(jobSpec.Job, "_", "-", -1), nil }) params.Add("NAMESPACE", func() (string, error) { return jobSpec.Namespace(), nil }) inputImages := make(inputImageSet) var overridableSteps, buildSteps, postSteps []api.Step var imageStepLinks []api.StepLink var hasReleaseStep bool // A pointer to a slice of pointers is weird, but necessary. Since the slice mutates inside of the other functions, // we need to pass the pointer - otherwise we will lose the updates after leaving the function scope. imageConfigs := &[]*api.InputImageTagStepConfiguration{} resolver := rootImageResolver(client, ctx) rawSteps, err := stepConfigsForBuild(config, jobSpec, ioutil.ReadFile, resolver, imageConfigs) if err != nil { return nil, nil, fmt.Errorf("failed to get stepConfigsForBuild: %w", err) } for _, rawStep := range rawSteps { if testStep := rawStep.TestStepConfiguration; testStep != nil { steps, err := stepForTest(config, params, podClient, leaseClient, templateClient, client, hiveClient, jobSpec, inputImages, testStep, imageConfigs) if err != nil { return nil, nil, err } buildSteps = append(buildSteps, steps...) continue } if resolveConfig := rawStep.ResolvedReleaseImagesStepConfiguration; resolveConfig != nil { // we need to expose the release step as 'step' so that it's in the // graph and can be targeted with '--target', but we can't let it get // removed via env-var, since release steps are apparently not subject // to that mechanism ... 
// // this is a disgusting hack but the simplest implementation until we // factor release steps into something more reusable hasReleaseStep = true var value string if env := utils.ReleaseImageEnv(resolveConfig.Name); params.HasInput(env) { value, err = params.Get(env) if err != nil { return nil, nil, results.ForReason("resolving_release").ForError(fmt.Errorf("failed to get %q parameter: %w", env, err)) } logrus.Infof("Using explicitly provided pull-spec for release %s (%s)", resolveConfig.Name, value) } else { switch { case resolveConfig.Candidate != nil: value, err = candidate.ResolvePullSpec(httpClient, *resolveConfig.Candidate) case resolveConfig.Release != nil: value, _, err = official.ResolvePullSpecAndVersion(httpClient, *resolveConfig.Release) case resolveConfig.Prerelease != nil: value, err = prerelease.ResolvePullSpec(httpClient, *resolveConfig.Prerelease) } if err != nil { return nil, nil, results.ForReason("resolving_release").ForError(fmt.Errorf("failed to resolve release %s: %w", resolveConfig.Name, err)) } logrus.Infof("Resolved release %s to %s", resolveConfig.Name, value) } step := releasesteps.ImportReleaseStep(resolveConfig.Name, value, false, config.Resources, podClient, jobSpec, pullSecret) buildSteps = append(buildSteps, step) addProvidesForStep(step, params) continue } var step api.Step var stepLinks []api.StepLink if rawStep.InputImageTagStepConfiguration != nil { conf := *rawStep.InputImageTagStepConfiguration if _, ok := inputImages[conf.InputImage]; ok { continue } step = steps.InputImageTagStep(&conf, client, jobSpec) inputImages[conf.InputImage] = struct{}{} } else if rawStep.PipelineImageCacheStepConfiguration != nil { step = steps.PipelineImageCacheStep(*rawStep.PipelineImageCacheStepConfiguration, config.Resources, buildClient, jobSpec, pullSecret) } else if rawStep.SourceStepConfiguration != nil { step = steps.SourceStep(*rawStep.SourceStepConfiguration, config.Resources, buildClient, jobSpec, cloneAuthConfig, pullSecret) } else if rawStep.BundleSourceStepConfiguration != nil { step = steps.BundleSourceStep(*rawStep.BundleSourceStepConfiguration, config, config.Resources, buildClient, jobSpec, pullSecret) } else if rawStep.IndexGeneratorStepConfiguration != nil { step = steps.IndexGeneratorStep(*rawStep.IndexGeneratorStepConfiguration, config, config.Resources, buildClient, jobSpec, pullSecret) } else if rawStep.ProjectDirectoryImageBuildStepConfiguration != nil { step = steps.ProjectDirectoryImageBuildStep(*rawStep.ProjectDirectoryImageBuildStepConfiguration, config, config.Resources, podClient, buildClient, jobSpec, pullSecret) } else if rawStep.ProjectDirectoryImageBuildInputs != nil { step = steps.GitSourceStep(*rawStep.ProjectDirectoryImageBuildInputs, config.Resources, buildClient, jobSpec, cloneAuthConfig, pullSecret) } else if rawStep.RPMImageInjectionStepConfiguration != nil { step = steps.RPMImageInjectionStep(*rawStep.RPMImageInjectionStepConfiguration, config.Resources, buildClient, jobSpec, pullSecret) } else if rawStep.RPMServeStepConfiguration != nil { step = steps.RPMServerStep(*rawStep.RPMServeStepConfiguration, client, jobSpec) } else if rawStep.OutputImageTagStepConfiguration != nil { step = steps.OutputImageTagStep(*rawStep.OutputImageTagStepConfiguration, client, jobSpec) // all required or non-optional output images are considered part of [images] if requiredNames.Has(string(rawStep.OutputImageTagStepConfiguration.From)) || !rawStep.OutputImageTagStepConfiguration.Optional { stepLinks = append(stepLinks, step.Creates()...) 
} } else if rawStep.ReleaseImagesTagStepConfiguration != nil { // if the user has specified a tag_specification we always // will import those images to the stable stream step = releasesteps.ReleaseImagesTagStep(*rawStep.ReleaseImagesTagStepConfiguration, client, params, jobSpec) stepLinks = append(stepLinks, step.Creates()...) hasReleaseStep = true // However, this user may have specified $RELEASE_IMAGE_foo // as well. For backwards compatibility, we explicitly support // 'initial' and 'latest': if not provided, we will build them. // If a pull spec was provided, however, it will be used. for _, name := range []string{api.InitialReleaseName, api.LatestReleaseName} { var releaseStep api.Step envVar := utils.ReleaseImageEnv(name) if params.HasInput(envVar) { pullSpec, err := params.Get(envVar) if err != nil { return nil, nil, results.ForReason("reading_release").ForError(fmt.Errorf("failed to read input release pullSpec %s: %w", name, err)) } logrus.Infof("Resolved release %s to %s", name, pullSpec) releaseStep = releasesteps.ImportReleaseStep(name, pullSpec, true, config.Resources, podClient, jobSpec, pullSecret) } else { releaseStep = releasesteps.AssembleReleaseStep(name, rawStep.ReleaseImagesTagStepConfiguration, config.Resources, podClient, jobSpec) } overridableSteps = append(overridableSteps, releaseStep) addProvidesForStep(releaseStep, params) } } step, ok := checkForFullyQualifiedStep(step, params) if ok { logrus.Infof("Task %s is satisfied by environment variables and will be skipped", step.Name()) } else { imageStepLinks = append(imageStepLinks, stepLinks...) } overridableSteps = append(overridableSteps, step) } for _, template := range templates { step := steps.TemplateExecutionStep(template, params, podClient, templateClient, jobSpec, config.Resources) var hasClusterType, hasUseLease bool for _, p := range template.Parameters { hasClusterType = hasClusterType || p.Name == "CLUSTER_TYPE" hasUseLease = hasUseLease || p.Name == "USE_LEASE_CLIENT" if hasClusterType && hasUseLease { clusterType, err := params.Get("CLUSTER_TYPE") if err != nil { return nil, nil, fmt.Errorf("failed to get \"CLUSTER_TYPE\" parameter: %w", err) } lease, err := api.LeaseTypeFromClusterType(clusterType) if err != nil { return nil, nil, fmt.Errorf("cannot resolve lease type from cluster type: %w", err) } leases := []api.StepLease{{ ResourceType: lease, Env: steps.DefaultLeaseEnv, Count: 1, }} step = steps.LeaseStep(leaseClient, leases, step, jobSpec.Namespace) break } } buildSteps = append(buildSteps, step) addProvidesForStep(step, params) } if len(paramFile) > 0 { step := steps.WriteParametersStep(params, paramFile) buildSteps = append(buildSteps, step) addProvidesForStep(step, params) } if !hasReleaseStep { step := releasesteps.StableImagesTagStep(client, jobSpec) buildSteps = append(buildSteps, step) addProvidesForStep(step, params) } step := steps.ImagesReadyStep(imageStepLinks) buildSteps = append(buildSteps, step) addProvidesForStep(step, params) if promote { if pushSecret == nil { return nil, nil, errors.New("--image-mirror-push-secret is required for promoting images") } if config.PromotionConfiguration == nil { return nil, nil, fmt.Errorf("cannot promote images, no promotion configuration defined") } postSteps = append(postSteps, releasesteps.PromotionStep(config, requiredNames, jobSpec, podClient, pushSecret)) } return append(overridableSteps, buildSteps...), postSteps, nil } // stepForTest creates the appropriate step for each test type. // Test steps are always leaves and often pruned. 
Each one is given its own
// copy of `params` and their values from `Provides` only affect themselves,
// thus avoiding conflicts with other tests pre-pruning.
func stepForTest(
	config *api.ReleaseBuildConfiguration,
	params *api.DeferredParameters,
	podClient steps.PodClient,
	leaseClient *lease.Client,
	templateClient steps.TemplateClient,
	client loggingclient.LoggingClient,
	hiveClient ctrlruntimeclient.Client,
	jobSpec *api.JobSpec,
	inputImages inputImageSet,
	c *api.TestStepConfiguration,
	imageConfigs *[]*api.InputImageTagStepConfiguration,
) ([]api.Step, error) {
	if test := c.MultiStageTestConfigurationLiteral; test != nil {
		leases := leasesForTest(test)
		if len(leases) != 0 {
			params = api.NewDeferredParameters(params)
		}
		step := steps.MultiStageTestStep(*c, config, params, podClient, jobSpec, leases)
		if len(leases) != 0 {
			step = steps.LeaseStep(leaseClient, leases, step, jobSpec.Namespace)
			addProvidesForStep(step, params)
		}
		if c.ClusterClaim != nil {
			step = steps.ClusterClaimStep(c.As, c.ClusterClaim, hiveClient, client, jobSpec, step)
		}
		newSteps := stepsForStepImages(client, jobSpec, inputImages, test, imageConfigs)
		return append([]api.Step{step}, newSteps...), nil
	}
	if test := c.OpenshiftInstallerClusterTestConfiguration; test != nil {
		if !test.Upgrade {
			return nil, nil
		}
		params = api.NewDeferredParameters(params)
		step, err := clusterinstall.E2ETestStep(*c.OpenshiftInstallerClusterTestConfiguration, *c, params, podClient, templateClient, jobSpec, config.Resources)
		if err != nil {
			return nil, fmt.Errorf("unable to create end to end test step: %w", err)
		}
		step = steps.LeaseStep(leaseClient, []api.StepLease{{
			ResourceType: test.ClusterProfile.LeaseType(),
			Env:          steps.DefaultLeaseEnv,
			Count:        1,
		}}, step, jobSpec.Namespace)
		addProvidesForStep(step, params)
		return []api.Step{step}, nil
	}
	step := steps.TestStep(*c, config.Resources, podClient, jobSpec)
	if c.ClusterClaim != nil {
		step = steps.ClusterClaimStep(c.As, c.ClusterClaim, hiveClient, client, jobSpec, step)
	}
	return []api.Step{step}, nil
}

// stepsForStepImages creates steps that import images referenced in test steps.
func stepsForStepImages(
	client loggingclient.LoggingClient,
	jobSpec *api.JobSpec,
	inputImages inputImageSet,
	test *api.MultiStageTestConfigurationLiteral,
	imageConfigs *[]*api.InputImageTagStepConfiguration,
) (ret []api.Step) {
	for _, subStep := range append(append(test.Pre, test.Test...), test.Post...) {
		if link, ok := subStep.FromImageTag(); ok {
			source := api.ImageStreamSource{SourceType: api.ImageStreamSourceTest, Name: subStep.As}
			config := api.InputImageTagStepConfiguration{
				InputImage: api.InputImage{
					BaseImage: *subStep.FromImage,
					To:        link,
				},
				Sources: []api.ImageStreamSource{source},
			}
			// Determine if there are any other steps with the same BaseImage/To.
			if _, ok := inputImages[config.InputImage]; ok {
				for _, existingImageConfig := range *imageConfigs {
					// If the existing step is an image tag step and it has the same image, then add the current step as a
					// source of that same image
					if existingImageConfig.Matches(config.InputImage) {
						existingImageConfig.AddSources(source)
					}
				}
			} else {
				// This image doesn't already exist, so add it.
				inputImages[config.InputImage] = struct{}{}
				step := steps.InputImageTagStep(&config, client, jobSpec)
				ret = append(ret, step)
				*imageConfigs = append(*imageConfigs, &config)
			}
		}
	}
	return
}

// addProvidesForStep registers the parameters a step provides in the deferred parameters map.
// Use this when a step may still need to run even if all parameters are provided // by the caller as environment variables. func addProvidesForStep(step api.Step, params *api.DeferredParameters)
// checkForFullyQualifiedStep checks whether all output parameters of this step are already
// part of the environment; if so, it replaces the step with a shim that automatically
// provides those variables. Returns true if the step was replaced.
func checkForFullyQualifiedStep(step api.Step, params *api.DeferredParameters) (api.Step, bool) {
	provides := step.Provides()

	if values, ok := paramsHasAllParametersAsInput(params, provides); ok {
		step = steps.NewInputEnvironmentStep(step.Name(), values, step.Creates())
		for k, v := range values {
			params.Set(k, v)
		}
		return step, true
	}
	for name, fn := range provides {
		params.Add(name, fn)
	}
	return step, false
}

// leasesForTest aggregates all the lease configurations in a test.
// It is assumed that they have been validated and contain only valid and
// unique values.
func leasesForTest(s *api.MultiStageTestConfigurationLiteral) (ret []api.StepLease) {
	if p := s.ClusterProfile; p != "" {
		ret = append(ret, api.StepLease{
			ResourceType: p.LeaseType(),
			Env:          steps.DefaultLeaseEnv,
			Count:        1,
		})
	}
	for _, step := range append(s.Pre, append(s.Test, s.Post...)...) {
		ret = append(ret, step.Leases...)
	}
	ret = append(ret, s.Leases...)
	return
}

// rootImageResolver creates a resolver for the root image import step. We attempt to resolve the root image and
// the build cache. If we are able to successfully determine that the build cache is up-to-date, we import it as
// the root image.
func rootImageResolver(client loggingclient.LoggingClient, ctx context.Context) func(root, cache *api.ImageStreamTagReference) (*api.ImageStreamTagReference, error) {
	return func(root, cache *api.ImageStreamTagReference) (*api.ImageStreamTagReference, error) {
		logrus.Debugf("Determining if build cache %s can be used in place of root %s", cache.ISTagName(), root.ISTagName())
		cacheTag := &imagev1.ImageStreamTag{}
		if err := client.Get(ctx, ctrlruntimeclient.ObjectKey{Namespace: cache.Namespace, Name: fmt.Sprintf("%s:%s", cache.Name, cache.Tag)}, cacheTag); err != nil {
			if kapierrors.IsNotFound(err) {
				logrus.Debugf("Build cache %s not found, falling back to %s", cache.ISTagName(), root.ISTagName())
				// no build cache, use the normal root
				return root, nil
			}
			return nil, fmt.Errorf("could not resolve build cache image stream tag %s: %w", cache.ISTagName(), err)
		}
		logrus.Debugf("Resolved build cache %s to %s", cache.ISTagName(), cacheTag.Image.Name)
		metadata := &docker10.DockerImage{}
		if len(cacheTag.Image.DockerImageMetadata.Raw) == 0 {
			return nil, fmt.Errorf("could not fetch Docker image metadata for build cache %s", cache.ISTagName())
		}
		if err := json.Unmarshal(cacheTag.Image.DockerImageMetadata.Raw, metadata); err != nil {
			return nil, fmt.Errorf("malformed Docker image metadata on build cache %s: %w", cache.ISTagName(), err)
		}
		prior := metadata.Config.Labels[api.ImageVersionLabel(api.PipelineImageStreamTagReferenceRoot)]
		logrus.Debugf("Build cache %s is based on root image at %s", cache.ISTagName(), prior)

		rootTag := &imagev1.ImageStreamTag{}
		if err := client.Get(ctx, ctrlruntimeclient.ObjectKey{Namespace: root.Namespace, Name: fmt.Sprintf("%s:%s", root.Name, root.Tag)}, rootTag); err != nil {
			return nil, fmt.Errorf("could not resolve build root image stream tag %s: %w", root.ISTagName(), err)
		}
		logrus.Debugf("Resolved root image %s to %s", root.ISTagName(), rootTag.Image.Name)
		current := rootTag.Image.Name
		if prior == current {
			logrus.Debugf("Using build cache %s as root image.", cache.ISTagName())
			return cache, nil
		}
		logrus.Debugf("Using default image %s as root image.", root.ISTagName())
		return root, nil
} } type readFile func(string) ([]byte, error) type resolveRoot func(root, cache *api.ImageStreamTagReference) (*api.ImageStreamTagReference, error) func stepConfigsForBuild( config *api.ReleaseBuildConfiguration, jobSpec *api.JobSpec, readFile readFile, resolveRoot resolveRoot, imageConfigs *[]*api.InputImageTagStepConfiguration) ([]api.StepConfiguration, error) { var buildSteps []api.StepConfiguration if config.InputConfiguration.BaseImages == nil { config.InputConfiguration.BaseImages = make(map[string]api.ImageStreamTagReference) } if config.InputConfiguration.BaseRPMImages == nil { config.InputConfiguration.BaseRPMImages = make(map[string]api.ImageStreamTagReference) } // ensure the "As" field is set to the provided alias. for alias, target := range config.InputConfiguration.BaseImages { target.As = alias config.InputConfiguration.BaseImages[alias] = target } for alias, target := range config.InputConfiguration.BaseRPMImages { target.As = alias config.InputConfiguration.BaseRPMImages[alias] = target } if target := config.InputConfiguration.BuildRootImage; target != nil { if target.FromRepository { istTagRef, err := buildRootImageStreamFromRepository(readFile) if err != nil { return nil, fmt.Errorf("failed to read buildRootImageStream from repository: %w", err) } target.ImageStreamTagReference = istTagRef } if isTagRef := target.ImageStreamTagReference; isTagRef != nil { if config.InputConfiguration.BuildRootImage.UseBuildCache { cache := api.BuildCacheFor(config.Metadata) root, err := resolveRoot(isTagRef, &cache) if err != nil { return nil, fmt.Errorf("could not resolve build root: %w", err) } isTagRef = root } config := api.InputImageTagStepConfiguration{ InputImage: api.InputImage{ BaseImage: *isTagRef, To: api.PipelineImageStreamTagReferenceRoot, }, Sources: []api.ImageStreamSource{{SourceType: api.ImageStreamSourceRoot}}, } buildSteps = append(buildSteps, api.StepConfiguration{ InputImageTagStepConfiguration: &config, }) *imageConfigs = append(*imageConfigs, &config) } else if gitSourceRef := target.ProjectImageBuild; gitSourceRef != nil { buildSteps = append(buildSteps, api.StepConfiguration{ ProjectDirectoryImageBuildInputs: gitSourceRef, }) } } if jobSpec.Refs != nil || len(jobSpec.ExtraRefs) > 0 { step := api.StepConfiguration{SourceStepConfiguration: &api.SourceStepConfiguration{ From: api.PipelineImageStreamTagReferenceRoot, To: api.PipelineImageStreamTagReferenceSource, ClonerefsImage: api.ImageStreamTagReference{ Namespace: "ci", Name: "managed-clonerefs", Tag: "latest", }, ClonerefsPath: "/clonerefs", }} buildSteps = append(buildSteps, step) } if len(config.BinaryBuildCommands) > 0 { buildSteps = append(buildSteps, api.StepConfiguration{PipelineImageCacheStepConfiguration: &api.PipelineImageCacheStepConfiguration{ From: api.PipelineImageStreamTagReferenceSource, To: api.PipelineImageStreamTagReferenceBinaries, Commands: config.BinaryBuildCommands, }}) } if len(config.TestBinaryBuildCommands) > 0 { buildSteps = append(buildSteps, api.StepConfiguration{PipelineImageCacheStepConfiguration: &api.PipelineImageCacheStepConfiguration{ From: api.PipelineImageStreamTagReferenceSource, To: api.PipelineImageStreamTagReferenceTestBinaries, Commands: config.TestBinaryBuildCommands, }}) } if len(config.RpmBuildCommands) > 0 { var from api.PipelineImageStreamTagReference if len(config.BinaryBuildCommands) > 0 { from = api.PipelineImageStreamTagReferenceBinaries } else { from = api.PipelineImageStreamTagReferenceSource } var out string if config.RpmBuildLocation != "" { out = 
config.RpmBuildLocation } else { out = api.DefaultRPMLocation } buildSteps = append(buildSteps, api.StepConfiguration{PipelineImageCacheStepConfiguration: &api.PipelineImageCacheStepConfiguration{ From: from, To: api.PipelineImageStreamTagReferenceRPMs, Commands: fmt.Sprintf(`%s; ln -s $( pwd )/%s %s`, config.RpmBuildCommands, out, api.RPMServeLocation), }}) buildSteps = append(buildSteps, api.StepConfiguration{RPMServeStepConfiguration: &api.RPMServeStepConfiguration{ From: api.PipelineImageStreamTagReferenceRPMs, }}) } for alias, baseImage := range config.BaseImages { config := api.InputImageTagStepConfiguration{ InputImage: api.InputImage{ BaseImage: defaultImageFromReleaseTag(baseImage, config.ReleaseTagConfiguration), To: api.PipelineImageStreamTagReference(alias), }, Sources: []api.ImageStreamSource{{SourceType: api.ImageStreamSourceBase}}, } buildSteps = append(buildSteps, api.StepConfiguration{InputImageTagStepConfiguration: &config}) *imageConfigs = append(*imageConfigs, &config) } for alias, target := range config.InputConfiguration.BaseRPMImages { intermediateTag := api.PipelineImageStreamTagReference(fmt.Sprintf("%s-without-rpms", alias)) config := api.InputImageTagStepConfiguration{ InputImage: api.InputImage{ BaseImage: defaultImageFromReleaseTag(target, config.ReleaseTagConfiguration), To: intermediateTag, }, Sources: []api.ImageStreamSource{{SourceType: api.ImageStreamSourceBaseRpm}}, } buildSteps = append(buildSteps, api.StepConfiguration{InputImageTagStepConfiguration: &config}) *imageConfigs = append(*imageConfigs, &config) buildSteps = append(buildSteps, api.StepConfiguration{RPMImageInjectionStepConfiguration: &api.RPMImageInjectionStepConfiguration{ From: intermediateTag, To: api.PipelineImageStreamTagReference(alias), }}) } for i := range config.Images { image := &config.Images[i] buildSteps = append(buildSteps, api.StepConfiguration{ProjectDirectoryImageBuildStepConfiguration: image}, api.StepConfiguration{OutputImageTagStepConfiguration: &api.OutputImageTagStepConfiguration{ From: image.To, To: api.ImageStreamTagReference{ Name: api.StableImageStream, Tag: string(image.To), }, Optional: image.Optional, }}) } if config.Operator != nil { // Build a bundle source image that substitutes all values in `substitutions` in all `manifests` directories buildSteps = append(buildSteps, api.StepConfiguration{BundleSourceStepConfiguration: &api.BundleSourceStepConfiguration{ Substitutions: config.Operator.Substitutions, }}) // Build bundles // First build named bundles and corresponding indices // store list of indices for unnamed bundles var unnamedBundles []int for index, bundleConfig := range config.Operator.Bundles { if bundleConfig.As == "" { unnamedBundles = append(unnamedBundles, index) continue } bundle := &api.ProjectDirectoryImageBuildStepConfiguration{ To: api.PipelineImageStreamTagReference(bundleConfig.As), ProjectDirectoryImageBuildInputs: api.ProjectDirectoryImageBuildInputs{ ContextDir: bundleConfig.ContextDir, DockerfilePath: bundleConfig.DockerfilePath, }, } buildSteps = append(buildSteps, api.StepConfiguration{ProjectDirectoryImageBuildStepConfiguration: bundle}) // Build index generator indexName := api.PipelineImageStreamTagReference(api.IndexName(bundleConfig.As)) updateGraph := bundleConfig.UpdateGraph if updateGraph == "" { updateGraph = api.IndexUpdateSemver } buildSteps = append(buildSteps, api.StepConfiguration{IndexGeneratorStepConfiguration: &api.IndexGeneratorStepConfiguration{ To: api.IndexGeneratorName(indexName), OperatorIndex: 
[]string{bundleConfig.As}, BaseIndex: bundleConfig.BaseIndex, UpdateGraph: updateGraph, }}) // Build the index index := &api.ProjectDirectoryImageBuildStepConfiguration{ To: indexName, ProjectDirectoryImageBuildInputs: api.ProjectDirectoryImageBuildInputs{ DockerfilePath: steps.IndexDockerfileName, }, } buildSteps = append(buildSteps, api.StepConfiguration{ProjectDirectoryImageBuildStepConfiguration: index}) } // Build non-named bundles following old naming system var bundles []string for _, bundleIndex := range unnamedBundles { bundle := config.Operator.Bundles[bundleIndex] bundleName := api.BundleName(bundleIndex) bundles = append(bundles, bundleName) image := &api.ProjectDirectoryImageBuildStepConfiguration{ To: api.PipelineImageStreamTagReference(bundleName), ProjectDirectoryImageBuildInputs: api.ProjectDirectoryImageBuildInputs{ ContextDir: bundle.ContextDir, DockerfilePath: bundle.DockerfilePath, }, } buildSteps = append(buildSteps, api.StepConfiguration{ProjectDirectoryImageBuildStepConfiguration: image}) } if len(bundles) > 0 { // Build index generator buildSteps = append(buildSteps, api.StepConfiguration{IndexGeneratorStepConfiguration: &api.IndexGeneratorStepConfiguration{ To: api.PipelineImageStreamTagReferenceIndexImageGenerator, OperatorIndex: bundles, UpdateGraph: api.IndexUpdateSemver, }}) // Build the index image := &api.ProjectDirectoryImageBuildStepConfiguration{ To: api.PipelineImageStreamTagReferenceIndexImage, ProjectDirectoryImageBuildInputs: api.ProjectDirectoryImageBuildInputs{ DockerfilePath: steps.IndexDockerfileName, }, } buildSteps = append(buildSteps, api.StepConfiguration{ProjectDirectoryImageBuildStepConfiguration: image}) } } for i := range config.Tests { test := &config.Tests[i] if test.ContainerTestConfiguration != nil || test.MultiStageTestConfigurationLiteral != nil || (test.OpenshiftInstallerClusterTestConfiguration != nil && test.OpenshiftInstallerClusterTestConfiguration.Upgrade) { if test.Secret != nil { test.Secrets = append(test.Secrets, test.Secret) } buildSteps = append(buildSteps, api.StepConfiguration{TestStepConfiguration: test}) } } if config.ReleaseTagConfiguration != nil { buildSteps = append(buildSteps, api.StepConfiguration{ReleaseImagesTagStepConfiguration: config.ReleaseTagConfiguration}) } for name := range config.Releases { buildSteps = append(buildSteps, api.StepConfiguration{ResolvedReleaseImagesStepConfiguration: &api.ReleaseConfiguration{ Name: name, UnresolvedRelease: config.Releases[name], }}) } buildSteps = append(buildSteps, config.RawSteps...) 
return buildSteps, nil } func paramsHasAllParametersAsInput(p api.Parameters, params map[string]func() (string, error)) (map[string]string, bool) { if len(params) == 0 { return nil, false } var values map[string]string for k := range params { if !p.HasInput(k) { return nil, false } if values == nil { values = make(map[string]string) } v, err := p.Get(k) if err != nil { return nil, false } values[k] = v } return values, true } func defaultImageFromReleaseTag(base api.ImageStreamTagReference, release *api.ReleaseTagConfiguration) api.ImageStreamTagReference { if release == nil { return base } if len(base.Tag) == 0 || len(base.Name) > 0 || len(base.Namespace) > 0 { return base } base.Name = release.Name base.Namespace = release.Namespace return base } func buildRootImageStreamFromRepository(readFile readFile) (*api.ImageStreamTagReference, error) { data, err := readFile(api.CIOperatorInrepoConfigFileName) if err != nil { return nil, fmt.Errorf("failed to read %s file: %w", api.CIOperatorInrepoConfigFileName, err) } config := api.CIOperatorInrepoConfig{} if err := yaml.Unmarshal(data, &config); err != nil { return nil, fmt.Errorf("failed to unmarshal %s: %w", api.CIOperatorInrepoConfigFileName, err) } return &config.BuildRootImage, nil }
{ for name, fn := range step.Provides() { params.Add(name, fn) } }
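The one-line middle above is the whole body of addProvidesForStep: it registers every parameter the step provides. checkForFullyQualifiedStep then uses paramsHasAllParametersAsInput to skip steps whose outputs are all already supplied by the environment. Below is a minimal, self-contained Go sketch of that skip logic; the params and fullyQualified names are toy illustrations, not the ci-tools API.

// fully_qualified_sketch.go - toy illustration of the env-override skip
package main

import "fmt"

// toy stand-in for api.DeferredParameters: env holds caller-supplied inputs
type params struct {
	env map[string]string
}

// fullyQualified mirrors paramsHasAllParametersAsInput above: it succeeds only
// when *every* name the step provides is already present as an input, in which
// case the captured values can be replayed instead of running the step.
func fullyQualified(p *params, provides map[string]func() (string, error)) (map[string]string, bool) {
	if len(provides) == 0 {
		return nil, false
	}
	values := make(map[string]string)
	for name := range provides {
		v, ok := p.env[name]
		if !ok {
			return nil, false // one missing input means the step must run
		}
		values[name] = v
	}
	return values, true
}

func main() {
	p := &params{env: map[string]string{"RELEASE_IMAGE_LATEST": "quay.io/some/pullspec"}}
	provides := map[string]func() (string, error){
		"RELEASE_IMAGE_LATEST": func() (string, error) { return "would-be-computed", nil },
	}
	if values, ok := fullyQualified(p, provides); ok {
		// the real code swaps the step for steps.NewInputEnvironmentStep here
		fmt.Println("step satisfied by environment; replaying:", values)
	}
}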
app.rs
use sauron_native::{ widget::{attribute::*, event::*, *}, Component, Node, }; pub struct App { click_count: u32, text: String, paragraph_text: String, } #[derive(Debug, Clone)] pub enum Msg { Click, ChangeText(String), Decrement, ParagraphChanged(String), } impl App { pub fn new(count: u32) -> App
} impl Component<Msg> for App { fn update(&mut self, msg: Msg) { match msg { Msg::Click => { self.click_count += 1; eprintln!("button is clicked"); } Msg::Decrement => self.click_count -= 1, Msg::ChangeText(txt) => { self.text = txt; } Msg::ParagraphChanged(txt) => { self.text = txt.to_string(); self.paragraph_text = txt; } } } fn view(&self) -> Node<Msg> { column( vec![], vec![ menu_bar( vec![], vec![ menu_item( vec![], vec![ text_label(vec![value("File")]), menu( vec![], vec![ menu_item( vec![], vec![text_label(vec![value( "Open", )])], ), menu_item( vec![], vec![text_label(vec![value( "Close", )])], ), ], ), ], ), menu_item( vec![], vec![text_label(vec![value("About")])], ), menu_item( vec![], vec![text_label(vec![value("Quit")])], ), ], ), header_bar( vec![], vec![ button(vec![label("Header button1")]), button(vec![label("Header button2")]), ], ), column( vec![], vec![ textarea(vec![ value(self.text.clone()), on_input(|input| { Msg::ParagraphChanged( input .value .as_str() .expect("must be a string") .to_owned(), ) }), ]), button(vec![label("btn1")]), button(vec![label("btn2")]), textarea(vec![value(self.paragraph_text.clone())]), ], ), ], ) } }
{ App { click_count: count, text: String::from("Some text"), paragraph_text: String::from("paragraph text"), } }
ze_generated_example_policies_client_test.go
//go:build go1.18 // +build go1.18 // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. package armdevtestlabs_test import ( "context" "log" "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" "github.com/Azure/azure-sdk-for-go/sdk/azidentity" "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/devtestlabs/armdevtestlabs" ) // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/devtestlabs/resource-manager/Microsoft.DevTestLab/stable/2018-09-15/examples/Policies_List.json func
() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armdevtestlabs.NewPoliciesClient("{subscriptionId}", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } pager := client.NewListPager("resourceGroupName", "{labName}", "{policySetName}", &armdevtestlabs.PoliciesClientListOptions{Expand: nil, Filter: nil, Top: nil, Orderby: nil, }) for pager.More() { nextResult, err := pager.NextPage(ctx) if err != nil { log.Fatalf("failed to advance page: %v", err) } for _, v := range nextResult.Value { // TODO: use page item _ = v } } } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/devtestlabs/resource-manager/Microsoft.DevTestLab/stable/2018-09-15/examples/Policies_Get.json func ExamplePoliciesClient_Get() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armdevtestlabs.NewPoliciesClient("{subscriptionId}", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.Get(ctx, "resourceGroupName", "{labName}", "{policySetName}", "{policyName}", &armdevtestlabs.PoliciesClientGetOptions{Expand: nil}) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/devtestlabs/resource-manager/Microsoft.DevTestLab/stable/2018-09-15/examples/Policies_CreateOrUpdate.json func ExamplePoliciesClient_CreateOrUpdate() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armdevtestlabs.NewPoliciesClient("{subscriptionId}", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.CreateOrUpdate(ctx, "resourceGroupName", "{labName}", "{policySetName}", "{policyName}", armdevtestlabs.Policy{ Location: to.Ptr("{location}"), Tags: map[string]*string{ "tagName1": to.Ptr("tagValue1"), }, Properties: &armdevtestlabs.PolicyProperties{ Description: to.Ptr("{policyDescription}"), EvaluatorType: to.Ptr(armdevtestlabs.PolicyEvaluatorType("{policyEvaluatorType}")), FactData: to.Ptr("{policyFactData}"), FactName: to.Ptr(armdevtestlabs.PolicyFactName("{policyFactName}")), Status: to.Ptr(armdevtestlabs.PolicyStatus("{policyStatus}")), Threshold: to.Ptr("{policyThreshold}"), }, }, nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/devtestlabs/resource-manager/Microsoft.DevTestLab/stable/2018-09-15/examples/Policies_Delete.json func ExamplePoliciesClient_Delete() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armdevtestlabs.NewPoliciesClient("{subscriptionId}", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } _, err = client.Delete(ctx, "resourceGroupName", "{labName}", "{policySetName}", "{policyName}", nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } } // Generated from example definition: 
https://github.com/Azure/azure-rest-api-specs/tree/main/specification/devtestlabs/resource-manager/Microsoft.DevTestLab/stable/2018-09-15/examples/Policies_Update.json func ExamplePoliciesClient_Update() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armdevtestlabs.NewPoliciesClient("{subscriptionId}", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.Update(ctx, "resourceGroupName", "{labName}", "{policySetName}", "{policyName}", armdevtestlabs.PolicyFragment{ Tags: map[string]*string{ "tagName1": to.Ptr("tagValue1"), }, }, nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res }
ExamplePoliciesClient_NewListPager
up-skin-selector.js
/* * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * The SkinSelector component houses the uPortal skin selection functionality and works * with a jQuery UI Dialog configured in the initializeSkinMenu() function, which lives * in the ajax-preferences-jquery.js file. A list of available skins is obtained through * an ajax GET request, which retrieves the skinList.xml file. The skinList.xml file is * parsed and a list of skins is dynamically generated and added to the associated * jQuery UI Dialog. * * ------------------- * Available selectors * ------------------- * * skinList * :Reference to the .skin-list DOM container that houses the list of available skins * dynamically created from parsing the skinList.xml file. * * listItem * :Reference to <li> DOM container with .list-item class name, which is housed under * the <ul class="skin-list"> container. * * skinWidget * :Reference to <div> DOM container with the .fl-widget class name, which is housed under * the <ul class="skin-list"> container. * * skinName * :Reference to <h2> DOM container with the .skin-name class name, which is housed in the * <ul class="skin-list"> container. * * skinThumbnail * :Reference to <img> DOM container with the .skin-thumbnail class name, which is housed * under the <ul class="skin-list"> container. * * ---------------- * Available events * ---------------- * * onSelectSkin * :Fired when a user "clicks" on a skin rendered to end users. * * --------------------------- * Other Configuration Options * --------------------------- * * currentSkin * :Reference to the currently selected skin. * * skinListURL * :URL utilized in ajax GET request to retrieve the skinList.xml file. * * mediaPath * :Reference to src attribute value for skin thumbnails. * * activeSkin * :Reference to .skin-active class name. The .skin-active class name is applied to the * currently selected skin in the list of skins rendered to end users. */ "use strict"; var up = up || {}; (function ($, fluid) { /** * Private. Builds and returns dataModel object based upon parsed * skinList.xml values. * * @param {String} key - references the value of the <skin-key> node, found within the skinList.xml file. * @param {String} name - references the value of the <skin-name> node, found within the skinList.xml file. * @param {String} description - references the value of the <skin-description> node, found within the skinList.xml file. * @param {String} thumbnailPath - references the media path to the skin's thumbnail image. */ var buildDataModel = function (key, name, description, thumbnailPath) { return { key: key, name: name, description: description, thumbnailPath: thumbnailPath, thumbnailAlt: (name + " Thumbnail") }; };//end:function. /** * Private. Build the cutpoints array, which defines renderer IDs * for each HTML element that will be rendered. 
* * @param {Object} that - reference to up.SkinSelector component. */ var buildCutPoints = function (that) { return [ {id: "listItem-row:", selector: that.options.selectors.listItem}, {id: "skinWrapper", selector: that.options.selectors.skinWrapper},
            {id: "skinName", selector: that.options.selectors.skinName},
            {id: "skinThumbnail", selector: that.options.selectors.skinThumbnail}
        ];
    };//end:function.

    /**
     * Private. Build skin list component subtree.
     *
     * @param {Object} that - reference to up.SkinSelector component.
     */
    var buildSkinListTree = function (that) {
        var treeChildren, skinRows;
        treeChildren = [];
        skinRows = fluid.transform(that.state.model, function (obj, index) {
            return {
                ID: "listItem-row:",
                children: [
                    {
                        ID: "skinWrapper",
                        decorators: [
                            {
                                type: "addClass",
                                classes: obj.key === that.options.currentSkin ? that.options.activeSkin : ""
                            }
                        ]
                    },
                    {
                        ID: "skinLink",
                        decorators: [
                            {
                                type: "jQuery",
                                func: "click",
                                args: function () {
                                    var skinList, li;
                                    // Remove 'skin-active' class from previous.
                                    skinList = that.locate("skinList");
                                    skinList.find("." + that.options.activeSkin).removeClass(that.options.activeSkin);
                                    // Apply 'skin-active' class to current.
                                    li = $(this);
                                    li.addClass(that.options.activeSkin);
                                    // Fire onSelectSkin event.
                                    that.events.onSelectSkin.fire(obj);
                                }
                            }
                        ]
                    },
                    {
                        ID: "skinName",
                        value: obj.name,
                        decorators: [
                            {
                                type: "attrs",
                                attributes: {
                                    title: obj.name
                                }
                            }
                        ]
                    },
                    {
                        ID: "skinThumbnail",
                        decorators: [
                            {
                                type: "attrs",
                                attributes: {
                                    style: 'background: url(' + obj.thumbnailPath + ') top left no-repeat;'
                                }
                            }
                        ]
                    }
                ]
            };
        });
        return treeChildren.concat(skinRows);
    };//end:function.

    /**
     * Private. Builds component tree utilized by the fluid.renderer component.
     *
     * @param {Object} that - reference to up.SkinSelector component.
     */
    var buildComponentTree = function (that) {
        return {
            children: buildSkinListTree(that)
        };
    };//end:function.

    /**
     * Private. Configures & executes the fluid.renderer component.
     *
     * @param {Object} that - reference to up.SkinSelector component.
     */
    var doRender = function (that) {
        var skinList, options;
        skinList = that.locate("skinList");
        options = {
            cutpoints: buildCutPoints(that),
            model: that.state.model,
            autoBind: true
        };

        // Run renderer.
        that.state.templates = fluid.selfRender(skinList, buildComponentTree(that), options);

        // Highlight active skin.
        skinList.find('input[value="' + that.options.currentSkin + '"]').parents(".widget").addClass(that.options.activeSkin);
    };//end:function.

    /**
     * Private. Parses the skinList.xml file, which is located in the following directory: trunk/uportal-war/src/main/webapp/media/skins/universality
     * Once parsed, this function extracts meta data for each <skin> defined in the skinList.xml file and constructs a data model through
     * the buildDataModel() function. Once built, the doRender() function is called, which houses the fluid.renderer implementation.
     *
     * @param {Object} that - reference to up.SkinSelector component.
     */
    var parseSkinListXML = function (that) {
        // Obtain skinList.xml.
        $.ajax({
            url: that.options.skinListURL,
            async: true,
            dataType: "xml",
            type: "GET",
            success: function (xml) {
                var root, skinNodes, skinList;
                root = $(xml);
                skinNodes = root.find("skin");
                skinList = that.locate("skinList");

                // Parse skinList.xml & construct ui.
                $.each(skinNodes, function (idx, obj) {
                    var skin, key, name, description, thumbnailPath;
                    skin = $(obj);
                    key = skin.children("skin-key").text();
                    name = skin.children("skin-name").text();
                    description = skin.children("skin-description").text();
                    thumbnailPath = (that.options.mediaPath + "/" + key + "/" + "thumb.gif");

                    // Build data model.
                    that.state.model.push(buildDataModel(key, name, description, thumbnailPath));
                });//end:loop.

                // Render UI.
                doRender(that);
            },
            error: function (XMLHttpRequest, textStatus, errorThrown) {
                if (console) {
                    console.log("AJAX Failure: ", XMLHttpRequest, textStatus, errorThrown);
                }
            }
        });
    };//end:function.

    /**
     * Private. Entry point for the SkinSelector component. Prepares UI
     * for the SkinSelector component by initializing component state and
     * calling the parseSkinListXML() function.
     *
     * @param {Object} that - reference to up.SkinSelector component.
     */
    var initialize = function (that) {
        // Initialize state map & model array.
        that.state = {};
        that.state.model = [];

        // Parse skinList.xml
        parseSkinListXML(that);
    };//end:function.

    /**
     * SkinSelector creator function for the SkinSelector component.
     *
     * @param {Object} container - reference to HTML DOM element by ID.
     * @param {Object} options - reference to object containing all configurations.
     */
    up.SkinSelector = function (container, options) {
        var that;
        that = fluid.initView("up.SkinSelector", container, options);

        /**
         * Refresh.
         */
        that.refresh = function () {
            var options;
            options = {
                cutpoints: buildCutPoints(that),
                model: that.state.model,
                autoBind: true
            };
            fluid.reRender(that.state.templates, that.locate("skinList"), buildComponentTree(that), options);
        };

        // Run initialization.
        initialize(that);
        return that;
    };//end:component.

    /**
     * Defaults for the SkinSelector component.
     */
    fluid.defaults("up.SkinSelector", {
        selectors: {
            skinList: ".skins-list",
            listItem: ".skin",
            skinWrapper: ".skins-wrapper",
            skinLink: ".skin-link",
            skinName: ".skin-titlebar",
            skinThumbnail: ".skin-thumb"
        },
        events: {
            onSelectSkin: null
        },
        currentSkin: null,
        skinListURL: null,
        mediaPath: null,
        activeSkin: "selected"
    });
})(jQuery, fluid);
{id: "skinLink", selector: that.options.selectors.skinLink},
elst.rs
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; #[cfg(feature = "use_serde")] use serde::Serialize;
#[derive(Debug, Clone, PartialEq, Default)]
#[cfg_attr(feature = "use_serde", derive(Serialize))]
pub struct ElstBox {
    pub version: u8,
    pub flags: u32,

    #[cfg_attr(feature = "use_serde", serde(skip_serializing))]
    pub entries: Vec<ElstEntry>,
}

#[derive(Debug, Clone, PartialEq, Default)]
#[cfg_attr(feature = "use_serde", derive(Serialize))]
pub struct ElstEntry {
    pub segment_duration: u64,
    pub media_time: u64,
    pub media_rate: u16,
    pub media_rate_fraction: u16,
}

impl ElstBox {
    pub fn get_type(&self) -> BoxType {
        BoxType::ElstBox
    }

    pub fn get_size(&self) -> u64 {
        let mut size = HEADER_SIZE + HEADER_EXT_SIZE + 4;
        if self.version == 1 {
            size += self.entries.len() as u64 * 20;
        } else {
            assert_eq!(self.version, 0);
            size += self.entries.len() as u64 * 12;
        }
        size
    }
}

impl Mp4Box for ElstBox {
    fn box_type(&self) -> BoxType {
        self.get_type()
    }

    fn box_size(&self) -> u64 {
        self.get_size()
    }

    #[cfg(feature = "use_serde")]
    fn to_json(&self) -> Result<String> {
        Ok(serde_json::to_string(&self).unwrap())
    }

    fn summary(&self) -> Result<String> {
        let s = format!("elst_entries={}", self.entries.len());
        Ok(s)
    }
}

impl<R: Read + Seek> ReadBox<&mut R> for ElstBox {
    fn read_box(reader: &mut R, size: u64) -> Result<Self> {
        let start = box_start(reader)?;

        let (version, flags) = read_box_header_ext(reader)?;

        let entry_count = reader.read_u32::<BigEndian>()?;
        let mut entries = Vec::with_capacity(entry_count as usize);
        for _ in 0..entry_count {
            let (segment_duration, media_time) = if version == 1 {
                (
                    reader.read_u64::<BigEndian>()?,
                    reader.read_u64::<BigEndian>()?,
                )
            } else {
                (
                    reader.read_u32::<BigEndian>()? as u64,
                    reader.read_u32::<BigEndian>()? as u64,
                )
            };

            let entry = ElstEntry {
                segment_duration,
                media_time,
                media_rate: reader.read_u16::<BigEndian>()?,
                media_rate_fraction: reader.read_u16::<BigEndian>()?,
            };
            entries.push(entry);
        }

        skip_bytes_to(reader, start + size)?;

        Ok(ElstBox {
            version,
            flags,
            entries,
        })
    }
}

impl<W: Write> WriteBox<&mut W> for ElstBox {
    fn write_box(&self, writer: &mut W) -> Result<u64> {
        let size = self.box_size();
        BoxHeader::new(self.box_type(), size).write(writer)?;

        write_box_header_ext(writer, self.version, self.flags)?;

        writer.write_u32::<BigEndian>(self.entries.len() as u32)?;
        for entry in self.entries.iter() {
            if self.version == 1 {
                writer.write_u64::<BigEndian>(entry.segment_duration)?;
                writer.write_u64::<BigEndian>(entry.media_time)?;
            } else {
                writer.write_u32::<BigEndian>(entry.segment_duration as u32)?;
                writer.write_u32::<BigEndian>(entry.media_time as u32)?;
            }
            writer.write_u16::<BigEndian>(entry.media_rate)?;
            writer.write_u16::<BigEndian>(entry.media_rate_fraction)?;
        }

        Ok(size)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mp4box::BoxHeader;
    use std::io::Cursor;

    #[test]
    fn test_elst32() {
        let src_box = ElstBox {
            version: 0,
            flags: 0,
            entries: vec![ElstEntry {
                segment_duration: 634634,
                media_time: 0,
                media_rate: 1,
                media_rate_fraction: 0,
            }],
        };
        let mut buf = Vec::new();
        src_box.write_box(&mut buf).unwrap();
        assert_eq!(buf.len(), src_box.box_size() as usize);

        let mut reader = Cursor::new(&buf);
        let header = BoxHeader::read(&mut reader).unwrap();
        assert_eq!(header.name, BoxType::ElstBox);
        assert_eq!(src_box.box_size(), header.size);

        let dst_box = ElstBox::read_box(&mut reader, header.size).unwrap();
        assert_eq!(src_box, dst_box);
    }

    #[test]
    fn test_elst64() {
        let src_box = ElstBox {
            version: 1,
            flags: 0,
            entries: vec![ElstEntry {
                segment_duration: 634634,
                media_time: 0,
                media_rate: 1,
                media_rate_fraction: 0,
            }],
        };
        let mut buf =
Vec::new(); src_box.write_box(&mut buf).unwrap(); assert_eq!(buf.len(), src_box.box_size() as usize); let mut reader = Cursor::new(&buf); let header = BoxHeader::read(&mut reader).unwrap(); assert_eq!(header.name, BoxType::ElstBox); assert_eq!(src_box.box_size(), header.size); let dst_box = ElstBox::read_box(&mut reader, header.size).unwrap(); assert_eq!(src_box, dst_box); } }
use std::io::{Read, Seek, Write}; use crate::mp4box::*;
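The entry layout read and written above depends on the box version: version 1 stores segment_duration and media_time as 64-bit values (20 bytes per entry) while version 0 stores them as 32-bit values (12 bytes per entry), matching get_size. As a cross-language illustration (plain Go with encoding/binary, not part of the Rust crate; box header, version, and flags omitted), the same wire layout can be serialized like this:

// elst_entry_sketch.go - toy illustration of the version-dependent entry widths
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

type elstEntry struct {
	segmentDuration uint64
	mediaTime       uint64
	mediaRate       uint16
	mediaRateFrac   uint16
}

// writeEntry emits one entry in big-endian order; duration and media time
// are 64-bit under version 1 and truncated to 32-bit under version 0.
// binary.Write errors are ignored here: bytes.Buffer writes never fail.
func writeEntry(buf *bytes.Buffer, version uint8, e elstEntry) {
	if version == 1 {
		binary.Write(buf, binary.BigEndian, e.segmentDuration)
		binary.Write(buf, binary.BigEndian, e.mediaTime)
	} else {
		binary.Write(buf, binary.BigEndian, uint32(e.segmentDuration))
		binary.Write(buf, binary.BigEndian, uint32(e.mediaTime))
	}
	binary.Write(buf, binary.BigEndian, e.mediaRate)
	binary.Write(buf, binary.BigEndian, e.mediaRateFrac)
}

func main() {
	e := elstEntry{segmentDuration: 634634, mediaRate: 1}
	for _, v := range []uint8{0, 1} {
		var buf bytes.Buffer
		writeEntry(&buf, v, e)
		fmt.Printf("version %d entry: %d bytes\n", v, buf.Len()) // 12, then 20
	}
}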
test_workertrial.py
# Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for L{twisted.trial._dist.workertrial}. """ import errno import sys from io import BytesIO from twisted.protocols.amp import AMP from twisted.test.proto_helpers import StringTransport from twisted.trial._dist import ( _WORKER_AMP_STDIN, _WORKER_AMP_STDOUT, managercommands, workercommands, workertrial, ) from twisted.trial._dist.workertrial import WorkerLogObserver, main from twisted.trial.unittest import TestCase class FakeAMP(AMP): """ A fake amp protocol. """ class WorkerLogObserverTests(TestCase):
class MainTests(TestCase): """ Tests for L{main}. """ def setUp(self): self.readStream = BytesIO() self.writeStream = BytesIO() self.patch( workertrial, "startLoggingWithObserver", self.startLoggingWithObserver ) self.addCleanup(setattr, sys, "argv", sys.argv) sys.argv = ["trial"] def fdopen(self, fd, mode=None): """ Fake C{os.fdopen} implementation which returns C{self.readStream} for the stdin fd and C{self.writeStream} for the stdout fd. """ if fd == _WORKER_AMP_STDIN: self.assertEqual("rb", mode) return self.readStream elif fd == _WORKER_AMP_STDOUT: self.assertEqual("wb", mode) return self.writeStream else: raise AssertionError(f"Unexpected fd {fd!r}") def startLoggingWithObserver(self, emit, setStdout): """ Override C{startLoggingWithObserver} for not starting logging. """ self.assertFalse(setStdout) def test_empty(self): """ If no data is ever written, L{main} exits without writing data out. """ main(self.fdopen) self.assertEqual(b"", self.writeStream.getvalue()) def test_forwardCommand(self): """ L{main} forwards data from its input stream to a L{WorkerProtocol} instance which writes data to the output stream. """ client = FakeAMP() clientTransport = StringTransport() client.makeConnection(clientTransport) client.callRemote(workercommands.Run, testCase="doesntexist") self.readStream = clientTransport.io self.readStream.seek(0, 0) main(self.fdopen) self.assertIn(b"No module named 'doesntexist'", self.writeStream.getvalue()) def test_readInterrupted(self): """ If reading the input stream fails with a C{IOError} with errno C{EINTR}, L{main} ignores it and continues reading. """ excInfos = [] class FakeStream: count = 0 def read(oself, size): oself.count += 1 if oself.count == 1: raise OSError(errno.EINTR) else: excInfos.append(sys.exc_info()) return b"" self.readStream = FakeStream() main(self.fdopen) self.assertEqual(b"", self.writeStream.getvalue()) self.assertEqual([(None, None, None)], excInfos) def test_otherReadError(self): """ L{main} only ignores C{IOError} with C{EINTR} errno: otherwise, the error pops out. """ class FakeStream: count = 0 def read(oself, size): oself.count += 1 if oself.count == 1: raise OSError("Something else") return "" self.readStream = FakeStream() self.assertRaises(IOError, main, self.fdopen)
""" Tests for L{WorkerLogObserver}. """ def test_emit(self): """ L{WorkerLogObserver} forwards data to L{managercommands.TestWrite}. """ calls = [] class FakeClient: def callRemote(self, method, **kwargs): calls.append((method, kwargs)) observer = WorkerLogObserver(FakeClient()) observer.emit({"message": ["Some log"]}) self.assertEqual(calls, [(managercommands.TestWrite, {"out": "Some log"})])
openslr.py
# coding=utf-8 # Copyright 2021 The HuggingFace Datasets Authors and the current dataset script contributor. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ OpenSLR Dataset""" from __future__ import absolute_import, division, print_function import os import re from pathlib import Path import datasets _DATA_URL = "https://openslr.org/resources/{}" _CITATION = """\ SLR35, SLR36: @inproceedings{kjartansson-etal-sltu2018, title = {{Crowd-Sourced Speech Corpora for Javanese, Sundanese, Sinhala, Nepali, and Bangladeshi Bengali}}, author = {Oddur Kjartansson and Supheakmungkol Sarin and Knot Pipatsrisawat and Martin Jansche and Linne Ha}, booktitle = {Proc. The 6th Intl. Workshop on Spoken Language Technologies for Under-Resourced Languages (SLTU)}, year = {2018}, address = {Gurugram, India}, month = aug, pages = {52--55}, URL = {http://dx.doi.org/10.21437/SLTU.2018-11}, } SLR41, SLR42, SLR43, SLR44: @inproceedings{kjartansson-etal-tts-sltu2018, title = {{A Step-by-Step Process for Building TTS Voices Using Open Source Data and Framework for Bangla, Javanese, Khmer, Nepali, Sinhala, and Sundanese}}, author = {Keshan Sodimana and Knot Pipatsrisawat and Linne Ha and Martin Jansche and Oddur Kjartansson and Pasindu De Silva and Supheakmungkol Sarin}, booktitle = {Proc. The 6th Intl. 
Workshop on Spoken Language Technologies for Under-Resourced Languages (SLTU)},
  year = {2018},
  address = {Gurugram, India},
  month = aug,
  pages = {66--70},
  URL = {http://dx.doi.org/10.21437/SLTU.2018-14}
}

SLR63, SLR64, SLR65, SLR66, SLR78, SLR79:
@inproceedings{he-etal-2020-open,
  title = {{Open-source Multi-speaker Speech Corpora for Building Gujarati, Kannada, Malayalam, Marathi, Tamil and Telugu Speech Synthesis Systems}},
  author = {He, Fei and Chu, Shan-Hui Cathy and Kjartansson, Oddur and Rivera, Clara and Katanova, Anna and Gutkin, Alexander and Demirsahin, Isin and Johny, Cibu and Jansche, Martin and Sarin, Supheakmungkol and Pipatsrisawat, Knot},
  booktitle = {Proceedings of The 12th Language Resources and Evaluation Conference (LREC)},
  month = may,
  year = {2020},
  address = {Marseille, France},
  publisher = {European Language Resources Association (ELRA)},
  pages = {6494--6503},
  url = {https://www.aclweb.org/anthology/2020.lrec-1.800},
  ISBN = {979-10-95546-34-4},
}

SLR69, SLR76, SLR77:
@inproceedings{kjartansson-etal-2020-open,
  title = {{Open-Source High Quality Speech Datasets for Basque, Catalan and Galician}},
  author = {Kjartansson, Oddur and Gutkin, Alexander and Butryna, Alena and Demirsahin, Isin and Rivera, Clara},
  booktitle = {Proceedings of the 1st Joint Workshop on Spoken Language Technologies for Under-resourced languages (SLTU) and Collaboration and Computing for Under-Resourced Languages (CCURL)},
  year = {2020},
  pages = {21--27},
  month = may,
  address = {Marseille, France},
  publisher = {European Language Resources Association (ELRA)},
  url = {https://www.aclweb.org/anthology/2020.sltu-1.3},
  ISBN = {979-10-95546-35-1},
}

SLR71, SLR72, SLR73, SLR74, SLR75:
@inproceedings{guevara-rukoz-etal-2020-crowdsourcing,
  title = {{Crowdsourcing Latin American Spanish for Low-Resource Text-to-Speech}},
  author = {Guevara-Rukoz, Adriana and Demirsahin, Isin and He, Fei and Chu, Shan-Hui Cathy and Sarin, Supheakmungkol and Pipatsrisawat, Knot and Gutkin, Alexander and Butryna, Alena and Kjartansson, Oddur},
  booktitle = {Proceedings of The 12th Language Resources and Evaluation Conference (LREC)},
  year = {2020},
  month = may,
  address = {Marseille, France},
  publisher = {European Language Resources Association (ELRA)},
  url = {https://www.aclweb.org/anthology/2020.lrec-1.801},
  pages = {6504--6513},
  ISBN = {979-10-95546-34-4},
}

SLR80
@inproceedings{oo-etal-2020-burmese,
  title = {{Burmese Speech Corpus, Finite-State Text Normalization and Pronunciation Grammars with an Application to Text-to-Speech}},
  author = {Oo, Yin May and Wattanavekin, Theeraphol and Li, Chenfang and De Silva, Pasindu and Sarin, Supheakmungkol and Pipatsrisawat, Knot and Jansche, Martin and Kjartansson, Oddur and Gutkin, Alexander},
  booktitle = {Proceedings of The 12th Language Resources and Evaluation Conference (LREC)},
  month = may,
  year = {2020},
  pages = {6328--6339},
  address = {Marseille, France},
  publisher = {European Language Resources Association (ELRA)},
  url = {https://www.aclweb.org/anthology/2020.lrec-1.777},
  ISBN = {979-10-95546-34-4},
}
SLR86 @inproceedings{gutkin-et-al-yoruba2020, title = {{Developing an Open-Source Corpus of Yoruba Speech}}, author = {Alexander Gutkin and Işın Demirşahin and Oddur Kjartansson and Clara Rivera and Kọ́lá Túbọ̀sún}, booktitle = {Proceedings of Interspeech 2020}, pages = {404--408}, month = {October}, year = {2020}, address = {Shanghai, China}, publisher = {International Speech and Communication Association (ISCA)}, doi = {10.21437/Interspeech.2020-1096}, url = {https://dx.doi.org/10.21437/Interspeech.2020-1096}, } """ _DESCRIPTION = """\ OpenSLR is a site devoted to hosting speech and language resources, such as training corpora for speech recognition, and software related to speech recognition. We intend to be a convenient place for anyone to put resources that they have created, so that they can be downloaded publicly. """ _HOMEPAGE = "https://openslr.org/" _LICENSE = "" _RESOURCES = { "SLR35": { "Language": "Javanese", "LongName": "Large Javanese ASR training data set", "Category": "Speech", "Summary": "Javanese ASR training data set containing ~185K utterances", "Files": [ "asr_javanese_0.zip", "asr_javanese_1.zip", "asr_javanese_2.zip", "asr_javanese_3.zip", "asr_javanese_4.zip", "asr_javanese_5.zip", "asr_javanese_6.zip", "asr_javanese_7.zip", "asr_javanese_8.zip", "asr_javanese_9.zip", "asr_javanese_a.zip", "asr_javanese_b.zip", "asr_javanese_c.zip", "asr_javanese_d.zip", "asr_javanese_e.zip", "asr_javanese_f.zip", ], "IndexFiles": ["asr_javanese/utt_spk_text.tsv"] * 16, "DataDirs": ["asr_javanese/data"] * 16, }, "SLR36": { "Language": "Sundanese", "LongName": "Large Sundanese ASR training data set", "Category": "Speech", "Summary": "Sundanese ASR training data set containing ~220K utterances", "Files": [ "asr_sundanese_0.zip", "asr_sundanese_1.zip", "asr_sundanese_2.zip", "asr_sundanese_3.zip", "asr_sundanese_4.zip", "asr_sundanese_5.zip", "asr_sundanese_6.zip", "asr_sundanese_7.zip", "asr_sundanese_8.zip", "asr_sundanese_9.zip", "asr_sundanese_a.zip", "asr_sundanese_b.zip", "asr_sundanese_c.zip", "asr_sundanese_d.zip", "asr_sundanese_e.zip", "asr_sundanese_f.zip", ], "IndexFiles": ["asr_sundanese/utt_spk_text.tsv"] * 16, "DataDirs": ["asr_sundanese/data"] * 16, }, "SLR41": { "Language": "Javanese", "LongName": "High quality TTS data for Javanese", "Category": "Speech", "Summary": "Multi-speaker TTS data for Javanese (jv-ID)", "Files": ["jv_id_female.zip", "jv_id_male.zip"], "IndexFiles": ["jv_id_female/line_index.tsv", "jv_id_male/line_index.tsv"], "DataDirs": ["jv_id_female/wavs", "jv_id_male/wavs"], }, "SLR42": { "Language": "Khmer", "LongName": "High quality TTS data for Khmer", "Category": "Speech", "Summary": "Multi-speaker TTS data for Khmer (km-KH)", "Files": ["km_kh_male.zip"], "IndexFiles": ["km_kh_male/line_index.tsv"], "DataDirs": ["km_kh_male/wavs"], }, "SLR43": { "Language": "Nepali", "LongName": "High quality TTS data for Nepali", "Category": "Speech", "Summary": "Multi-speaker TTS data for Nepali (ne-NP)", "Files": ["ne_np_female.zip"], "IndexFiles": ["ne_np_female/line_index.tsv"], "DataDirs": ["ne_np_female/wavs"], }, "SLR44": { "Language": "Sundanese", "LongName": "High quality TTS data for Sundanese", "Category": "Speech", "Summary": "Multi-speaker TTS data for Javanese Sundanese (su-ID)", "Files": ["su_id_female.zip", "su_id_male.zip"], "IndexFiles": ["su_id_female/line_index.tsv", "su_id_male/line_index.tsv"], "DataDirs": ["su_id_female/wavs", "su_id_male/wavs"], }, "SLR63": { "Language": "Malayalam", "LongName": "Crowdsourced high-quality Malayalam 
multi-speaker speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of native speakers of Malayalam",
        "Files": ["ml_in_female.zip", "ml_in_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR64": {
        "Language": "Marathi",
        "LongName": "Crowdsourced high-quality Marathi multi-speaker speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of native speakers of Marathi",
        "Files": ["mr_in_female.zip"],
        "IndexFiles": ["line_index.tsv"],
        "DataDirs": [""],
    },
    "SLR65": {
        "Language": "Tamil",
        "LongName": "Crowdsourced high-quality Tamil multi-speaker speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of native speakers of Tamil",
        "Files": ["ta_in_female.zip", "ta_in_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR66": {
        "Language": "Telugu",
        "LongName": "Crowdsourced high-quality Telugu multi-speaker speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of native speakers of Telugu",
        "Files": ["te_in_female.zip", "te_in_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR69": {
        "Language": "Catalan",
        "LongName": "Crowdsourced high-quality Catalan speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Catalan",
        "Files": ["ca_es_female.zip", "ca_es_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR70": {
        "Language": "Nigerian English",
        "LongName": "Crowdsourced high-quality Nigerian English speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Nigerian English",
        "Files": ["en_ng_female.zip", "en_ng_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR71": {
        "Language": "Chilean Spanish",
        "LongName": "Crowdsourced high-quality Chilean Spanish speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Chilean Spanish",
        "Files": ["es_cl_female.zip", "es_cl_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR72": {
        "Language": "Colombian Spanish",
        "LongName": "Crowdsourced high-quality Colombian Spanish speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Colombian Spanish",
        "Files": ["es_co_female.zip", "es_co_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR73": {
        "Language": "Peruvian Spanish",
        "LongName": "Crowdsourced high-quality Peruvian Spanish speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Peruvian Spanish",
        "Files": ["es_pe_female.zip", "es_pe_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR74": {
        "Language": "Puerto Rico Spanish",
        "LongName": "Crowdsourced high-quality Puerto Rico Spanish speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Puerto Rico Spanish",
        "Files": ["es_pr_female.zip"],
        "IndexFiles": ["line_index.tsv"],
        "DataDirs": [""],
    },
    "SLR75": {
        "Language": "Venezuelan Spanish",
        "LongName": "Crowdsourced high-quality Venezuelan Spanish speech data set",
        "Category": "Speech",
        "Summary": "Data set which contains recordings of Venezuelan Spanish",
        "Files": ["es_ve_female.zip", "es_ve_male.zip"],
        "IndexFiles": ["line_index.tsv", "line_index.tsv"],
        "DataDirs": ["", ""],
    },
    "SLR76": {
        "Language": "Basque",
"LongName": "Crowdsourced high-quality Basque speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of Basque", "Files": ["eu_es_female.zip", "eu_es_male.zip"], "IndexFiles": ["line_index.tsv", "line_index.tsv"], "DataDirs": ["", ""], }, "SLR77": { "Language": "Galician", "LongName": "Crowdsourced high-quality Galician speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of Galician", "Files": ["gl_es_female.zip", "gl_es_male.zip"], "IndexFiles": ["line_index.tsv", "line_index.tsv"], "DataDirs": ["", ""], }, "SLR78": { "Language": "Gujarati", "LongName": "Crowdsourced high-quality Gujarati multi-speaker speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of native speakers of Gujarati", "Files": ["gu_in_female.zip", "gu_in_male.zip"], "IndexFiles": ["line_index.tsv", "line_index.tsv"], "DataDirs": ["", ""], }, "SLR79": { "Language": "Kannada", "LongName": "Crowdsourced high-quality Kannada multi-speaker speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of native speakers of Kannada", "Files": ["kn_in_female.zip", "kn_in_male.zip"], "IndexFiles": ["line_index.tsv", "line_index.tsv"], "DataDirs": ["", ""], }, "SLR80": { "Language": "Burmese", "LongName": "Crowdsourced high-quality Burmese speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of Burmese", "Files": ["my_mm_female.zip"], "IndexFiles": ["line_index.tsv"], "DataDirs": [""], }, "SLR86": { "Language": "Yoruba", "LongName": "Crowdsourced high-quality Yoruba speech data set", "Category": "Speech", "Summary": "Data set which contains recordings of Yoruba", "Files": ["yo_ng_female.zip", "yo_ng_male.zip"], "IndexFiles": ["line_index.tsv", "line_index.tsv"], "DataDirs": ["", ""], }, } class OpenSlrConfig(datasets.BuilderConfig): """BuilderConfig for OpenSlr.""" def __init__(self, name, **kwargs): """ Args: data_dir: `string`, the path to the folder containing the files in the downloaded .tar citation: `string`, citation for the data set url: `string`, url for information about the data set **kwargs: keyword arguments forwarded to super. """ self.language = kwargs.pop("language", None) self.long_name = kwargs.pop("long_name", None) self.category = kwargs.pop("category", None) self.summary = kwargs.pop("summary", None) self.files = kwargs.pop("files", None) self.index_files = kwargs.pop("index_files", None) self.data_dirs = kwargs.pop("data_dirs", None) description = f"Open Speech and Language Resources dataset in {self.language}. Name: {self.name}, Summary: {self.summary}." 
super(OpenSlrConfig, self).__init__(name=name, description=description, **kwargs) class OpenSlr(datasets.GeneratorBasedBuilder): BUILDER_CONFIGS = [ OpenSlrConfig( name=resource_id, language=_RESOURCES[resource_id]["Language"], long_name=_RESOURCES[resource_id]["LongName"], category=_RESOURCES[resource_id]["Category"], summary=_RESOURCES[resource_id]["Summary"], files=_RESOURCES[resource_id]["Files"], index_files=_RESOURCES[resource_id]["IndexFiles"], data_dirs=_RESOURCES[resource_id]["DataDirs"], ) for resource_id in _RESOURCES.keys() ] def _info(self): features = datasets.Features( { "path": datasets.Value("string"), "sentence": datasets.Value("string"), } ) return datasets.DatasetInfo( description=_DESCRIPTION, features=features, supervised_keys=None, homepage=_HOMEPAGE, license=_LICENSE, citation=_CITATION, ) def _split_generators(self, dl_manager): """Returns SplitGenerators.""" resource_number = self.config.name.replace("SLR", "") urls = [f"{_DATA_URL.format(resource_number)}/{file}" for file in self.config.files] dl_paths = dl_manager.download_and_extract(urls) abs_path_to_indexs = [os.path.join(path, f"{self.config.index_files[i]}") for i, path in enumerate(dl_paths)] abs_path_to_datas = [os.path.join(path, f"{self.config.data_dirs[i]}") for i, path in enumerate(dl_paths)] return [ datasets.SplitGenerator( name=datasets.Split.TRAIN, gen_kwargs={ "path_to_indexs": abs_path_to_indexs, "path_to_datas": abs_path_to_datas, }, ), ] def _generate_examples(self, path_to_indexs, path_to_datas): """ Yields examples. """ counter = -1 if self.config.name in ["SLR35", "SLR36"]: sentence_index = {} for i, path_to_index in enumerate(path_to_indexs): with open(path_to_index, encoding="utf-8") as f: lines = f.readlines() for id_, line in enumerate(lines): field_values = re.split(r"\t\t?", line.strip()) filename, user_id, sentence = field_values sentence_index[filename] = sentence for path_to_data in sorted(Path(path_to_datas[i]).rglob("*.flac")): filename = path_to_data.stem if path_to_data.stem not in sentence_index: continue path = str(path_to_data.resolve()) sentence = sentence_index[filename] counter += 1 yield counter, {"path": path, "sentence": sentence} else: for i, path_to_index in enumerate(path_to_indexs): with open(path_to_index, encoding="utf-8") as f: lines = f.readlines() for id_, line in enumerate(lines): # Following regexs are needed to normalise the lines, since the datasets # are not always consistent and have bugs: line = re.sub(r"\t[^\t]*\t", "\t", line.strip()) field_values = re.split(r"\t\t?", line) if len(field_values) != 2: continue filename, sentence = field_values # set absolute path for audio file path = os.path.join(path_to_datas[i], f"{filename}.wav") counter += 1 yield counter, {"path": path, "sentence": sentence}
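# A minimal usage sketch for the builder above, assuming the script is
# published on the Hugging Face Hub under the dataset name "openslr"; the
# configuration name selects one of the _RESOURCES keys ("SLR42" is the
# Khmer TTS corpus).
if __name__ == "__main__":
    from datasets import load_dataset

    # Downloads and extracts the SLR42 archives, then materializes the train split.
    slr42 = load_dataset("openslr", "SLR42", split="train")
    print(slr42[0]["path"], slr42[0]["sentence"])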
samplecontroller_client.go
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package v1alpha1 import ( "k8s.io/apimachinery/pkg/runtime/serializer" "k8s.io/client-go/rest" "k8s.io/sample-controller/pkg/apis/samplecontroller/v1alpha1" "k8s.io/sample-controller/pkg/client/clientset/versioned/scheme" ) type SamplecontrollerV1alpha1Interface interface { RESTClient() rest.Interface FoosGetter } // SamplecontrollerV1alpha1Client is used to interact with features provided by the samplecontroller.k8s.io group. type SamplecontrollerV1alpha1Client struct { restClient rest.Interface } func (c *SamplecontrollerV1alpha1Client) Foos(namespace string) FooInterface { return newFoos(c, namespace) } // NewForConfig creates a new SamplecontrollerV1alpha1Client for the given config. func NewForConfig(c *rest.Config) (*SamplecontrollerV1alpha1Client, error) { config := *c if err := setConfigDefaults(&config); err != nil { return nil, err } client, err := rest.RESTClientFor(&config) if err != nil { return nil, err } return &SamplecontrollerV1alpha1Client{client}, nil } // NewForConfigOrDie creates a new SamplecontrollerV1alpha1Client for the given config and // panics if there is an error in the config. func NewForConfigOrDie(c *rest.Config) *SamplecontrollerV1alpha1Client { client, err := NewForConfig(c) if err != nil
return client
}

// New creates a new SamplecontrollerV1alpha1Client for the given RESTClient.
func New(c rest.Interface) *SamplecontrollerV1alpha1Client {
	return &SamplecontrollerV1alpha1Client{c}
}

func setConfigDefaults(config *rest.Config) error {
	gv := v1alpha1.SchemeGroupVersion
	config.GroupVersion = &gv
	config.APIPath = "/apis"
	config.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: scheme.Codecs}

	if config.UserAgent == "" {
		config.UserAgent = rest.DefaultKubernetesUserAgent()
	}

	return nil
}

// RESTClient returns a RESTClient that is used to communicate
// with the API server used by this client implementation.
func (c *SamplecontrollerV1alpha1Client) RESTClient() rest.Interface {
	if c == nil {
		return nil
	}
	return c.restClient
}
{ panic(err) }
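# A minimal sketch of the same access pattern from Python, assuming a running
# cluster with the samplecontroller.k8s.io/v1alpha1 Foo CRD installed. The
# typed Go client above is generated by client-gen; Python has no generated
# equivalent, so the generic custom-objects API of the `kubernetes` package
# plays the role of client.Foos("default").List(...).
from kubernetes import client, config

config.load_kube_config()  # like building a rest.Config from kubeconfig in Go
api = client.CustomObjectsApi()
foos = api.list_namespaced_custom_object(
    group="samplecontroller.k8s.io",
    version="v1alpha1",
    namespace="default",
    plural="foos",
)
for item in foos["items"]:
    print(item["metadata"]["name"])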
test_docscrape.py
# -*- encoding:utf-8 -*- from collections import namedtuple from copy import deepcopy import re import textwrap import warnings import jinja2 from numpydoc.numpydoc import update_config from numpydoc.xref import DEFAULT_LINKS from numpydoc.docscrape import ( NumpyDocString, FunctionDoc, ClassDoc, ParseError ) from numpydoc.docscrape_sphinx import (SphinxDocString, SphinxClassDoc, SphinxFunctionDoc, get_doc_object) from pytest import raises as assert_raises from pytest import warns as assert_warns doc_txt = '''\ numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. no_description Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. See Also -------- some, other, funcs otherfunc : relationship Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print(x.shape) (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print(list((x[0, 0, :] - mean) < 0.6)) [True, True] .. index:: random :refguide: random;distributions, random;gauss ''' doc = NumpyDocString(doc_txt) doc_yields_txt = """ Test generator Yields ------ a : int The number of apples. b : int The number of bananas. int The number of unknowns. """ doc_yields = NumpyDocString(doc_yields_txt) doc_sent_txt = """ Test generator Yields ------ a : int The number of apples. Receives -------- b : int The number of bananas. c : int The number of oranges. 
""" doc_sent = NumpyDocString(doc_sent_txt) def test_signature(): assert doc['Signature'].startswith('numpy.multivariate_normal(') assert doc['Signature'].endswith('spam=None)') def test_summary(): assert doc['Summary'][0].startswith('Draw values') assert doc['Summary'][-1].endswith('covariance.') def test_extended_summary(): assert doc['Extended Summary'][0].startswith('The multivariate normal') def test_parameters(): assert len(doc['Parameters']) == 3 names = [n for n, _, _ in doc['Parameters']] assert all(a == b for a, b in zip(names, ['mean', 'cov', 'shape'])) arg, arg_type, desc = doc['Parameters'][1] assert arg_type == '(N, N) ndarray' assert desc[0].startswith('Covariance matrix') assert doc['Parameters'][0][-1][-1] == ' (1+2+3)/3' def test_other_parameters(): assert len(doc['Other Parameters']) == 1 assert [n for n, _, _ in doc['Other Parameters']] == ['spam'] arg, arg_type, desc = doc['Other Parameters'][0] assert arg_type == 'parrot' assert desc[0].startswith('A parrot off its mortal coil') def test_returns(): assert len(doc['Returns']) == 3 arg, arg_type, desc = doc['Returns'][0] assert arg == 'out' assert arg_type == 'ndarray' assert desc[0].startswith('The drawn samples') assert desc[-1].endswith('distribution.') arg, arg_type, desc = doc['Returns'][1] assert arg == '' assert arg_type == 'list of str' assert desc[0].startswith('This is not a real') assert desc[-1].endswith('anonymous return values.') arg, arg_type, desc = doc['Returns'][2] assert arg == '' assert arg_type == 'no_description' assert not ''.join(desc).strip() def test_yields(): section = doc_yields['Yields'] assert len(section) == 3 truth = [('a', 'int', 'apples.'), ('b', 'int', 'bananas.'), ('', 'int', 'unknowns.')] for (arg, arg_type, desc), (arg_, arg_type_, end) in zip(section, truth): assert arg == arg_ assert arg_type == arg_type_ assert desc[0].startswith('The number of') assert desc[0].endswith(end)
section = doc_sent['Receives'] assert len(section) == 2 truth = [('b', 'int', 'bananas.'), ('c', 'int', 'oranges.')] for (arg, arg_type, desc), (arg_, arg_type_, end) in zip(section, truth): assert arg == arg_ assert arg_type == arg_type_ assert desc[0].startswith('The number of') assert desc[0].endswith(end) def test_returnyield(): doc_text = """ Test having returns and yields. Returns ------- int The number of apples. Yields ------ a : int The number of apples. b : int The number of bananas. """ assert_raises(ValueError, NumpyDocString, doc_text) def test_section_twice(): doc_text = """ Test having a section Notes twice Notes ----- See the next note for more information Notes ----- That should break... """ assert_raises(ValueError, NumpyDocString, doc_text) # if we have a numpydoc object, we know where the error came from class Dummy: """ Dummy class. Notes ----- First note. Notes ----- Second note. """ def spam(self, a, b): """Spam\n\nSpam spam.""" pass def ham(self, c, d): """Cheese\n\nNo cheese.""" pass def dummy_func(arg): """ Dummy function. Notes ----- First note. Notes ----- Second note. """ try: SphinxClassDoc(Dummy) except ValueError as e: # python 3 version or python 2 version assert ("test_section_twice.<locals>.Dummy" in str(e) or 'test_docscrape.Dummy' in str(e)) try: SphinxFunctionDoc(dummy_func) except ValueError as e: # python 3 version or python 2 version assert ("test_section_twice.<locals>.dummy_func" in str(e) or 'function dummy_func' in str(e)) def test_notes(): assert doc['Notes'][0].startswith('Instead') assert doc['Notes'][-1].endswith('definite.') assert len(doc['Notes']) == 17 def test_references(): assert doc['References'][0].startswith('..') assert doc['References'][-1].endswith('2001.') def test_examples(): assert doc['Examples'][0].startswith('>>>') assert doc['Examples'][-1].endswith('True]') def test_index(): assert doc['index']['default'] == 'random' assert len(doc['index']) == 2 assert len(doc['index']['refguide']) == 2 def _strip_blank_lines(s): "Remove leading, trailing and multiple blank lines" s = re.sub(r'^\s*\n', '', s) s = re.sub(r'\n\s*$', '', s) s = re.sub(r'\n\s*\n', r'\n\n', s) return s def line_by_line_compare(a, b, n_lines=None): a = textwrap.dedent(a) b = textwrap.dedent(b) a = [l.rstrip() for l in _strip_blank_lines(a).split('\n')][:n_lines] b = [l.rstrip() for l in _strip_blank_lines(b).split('\n')][:n_lines] assert len(a) == len(b) for ii, (aa, bb) in enumerate(zip(a, b)): assert aa == bb def test_str(): # doc_txt has the order of Notes and See Also sections flipped. # This should be handled automatically, and so, one thing this test does # is to make sure that See Also precedes Notes in the output. line_by_line_compare(str(doc), """numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is (m,n,...,N). 
In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. no_description Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. See Also -------- `some`_, `other`_, `funcs`_ .. `otherfunc`_ relationship Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print(x.shape) (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print(list((x[0, 0, :] - mean) < 0.6)) [True, True] .. index:: random :refguide: random;distributions, random;gauss""") def test_yield_str(): line_by_line_compare(str(doc_yields), """Test generator Yields ------ a : int The number of apples. b : int The number of bananas. int The number of unknowns. """) def test_receives_str(): line_by_line_compare(str(doc_sent), """Test generator Yields ------ a : int The number of apples. Receives -------- b : int The number of bananas. c : int The number of oranges. """) def test_no_index_in_str(): assert "index" not in str(NumpyDocString("""Test idx """)) assert "index" in str(NumpyDocString("""Test idx .. index :: random """)) assert "index" in str(NumpyDocString("""Test idx .. index :: foo """)) def test_sphinx_str(): sphinx_doc = SphinxDocString(doc_txt) line_by_line_compare(str(sphinx_doc), """ .. index:: random single: random;distributions, random;gauss Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. :Parameters: **mean** : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 **cov** : (N, N) ndarray Covariance matrix of the distribution. **shape** : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). :Returns: **out** : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. no_description .. :Other Parameters: **spam** : parrot A parrot off its mortal coil. :Raises: RuntimeError Some error :Warns: RuntimeWarning Some warning .. 
warning:: Certain warnings apply. .. seealso:: :obj:`some`, :obj:`other`, :obj:`funcs` .. :obj:`otherfunc` relationship .. rubric:: Notes Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. .. rubric:: References .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. .. only:: latex [1]_, [2]_ .. rubric:: Examples >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print(x.shape) (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print(list((x[0, 0, :] - mean) < 0.6)) [True, True] """) def test_sphinx_yields_str(): sphinx_doc = SphinxDocString(doc_yields_txt) line_by_line_compare(str(sphinx_doc), """Test generator :Yields: **a** : int The number of apples. **b** : int The number of bananas. int The number of unknowns. """) doc2 = NumpyDocString(""" Returns array of indices of the maximum values of along the given axis. Parameters ---------- a : {array_like} Array to look in. axis : {None, integer} If None, the index is into the flattened array, otherwise along the specified axis""") def test_parameters_without_extended_description(): assert len(doc2['Parameters']) == 2 doc3 = NumpyDocString(""" my_signature(*params, **kwds) Return this and that. """) def test_escape_stars(): signature = str(doc3).split('\n')[0] assert signature == r'my_signature(\*params, \*\*kwds)' def my_func(a, b, **kwargs): pass fdoc = FunctionDoc(func=my_func) assert fdoc['Signature'] == '' doc4 = NumpyDocString( """a.conj() Return an array with all complex-valued elements conjugated.""") def test_empty_extended_summary(): assert doc4['Extended Summary'] == [] doc5 = NumpyDocString( """ a.something() Raises ------ LinAlgException If array is singular. 
Warns ----- SomeWarning If needed """) def test_raises(): assert len(doc5['Raises']) == 1 param = doc5['Raises'][0] assert param.name == '' assert param.type == 'LinAlgException' assert param.desc == ['If array is singular.'] def test_warns(): assert len(doc5['Warns']) == 1 param = doc5['Warns'][0] assert param.name == '' assert param.type == 'SomeWarning' assert param.desc == ['If needed'] def test_see_also(): doc6 = NumpyDocString( """ z(x,theta) See Also -------- func_a, func_b, func_c func_d : some equivalent func foo.func_e : some other func over multiple lines func_f, func_g, :meth:`func_h`, func_j, func_k func_f1, func_g1, :meth:`func_h1`, func_j1 func_f2, func_g2, :meth:`func_h2`, func_j2 : description of multiple :obj:`baz.obj_q` :obj:`~baz.obj_r` :class:`class_j`: fubar foobar """) assert len(doc6['See Also']) == 10 for funcs, desc in doc6['See Also']: for func, role in funcs: if func in ('func_a', 'func_b', 'func_c', 'func_f', 'func_g', 'func_h', 'func_j', 'func_k', 'baz.obj_q', 'func_f1', 'func_g1', 'func_h1', 'func_j1', '~baz.obj_r'): assert not desc, str([func, desc]) elif func in ('func_f2', 'func_g2', 'func_h2', 'func_j2'): assert desc, str([func, desc]) else: assert desc, str([func, desc]) if func == 'func_h': assert role == 'meth' elif func == 'baz.obj_q' or func == '~baz.obj_r': assert role == 'obj' elif func == 'class_j': assert role == 'class' elif func in ['func_h1', 'func_h2']: assert role == 'meth' else: assert role is None, str([func, role]) if func == 'func_d': assert desc == ['some equivalent func'] elif func == 'foo.func_e': assert desc == ['some other func over', 'multiple lines'] elif func == 'class_j': assert desc == ['fubar', 'foobar'] elif func in ['func_f2', 'func_g2', 'func_h2', 'func_j2']: assert desc == ['description of multiple'], str([desc, ['description of multiple']]) def test_see_also_parse_error(): text = ( """ z(x,theta) See Also -------- :func:`~foo` """) with assert_raises(ParseError) as err: NumpyDocString(text) s1 = str(r":func:`~foo` is not a item name in '\n z(x,theta)\n\n See Also\n --------\n :func:`~foo`\n '") s2 = str(err.value) assert s1 == s2 def test_see_also_print(): class Dummy: """ See Also -------- func_a, func_b func_c : some relationship goes here func_d """ pass s = str(FunctionDoc(Dummy, role='func')) assert(':func:`func_a`, :func:`func_b`' in s) assert(' some relationship' in s) assert(':func:`func_d`' in s) def test_see_also_trailing_comma_warning(): warnings.filterwarnings('error') with assert_warns(Warning, match='Unexpected comma or period after function list at index 43 of line .*'): NumpyDocString( """ z(x,theta) See Also -------- func_f2, func_g2, :meth:`func_h2`, func_j2, : description of multiple :class:`class_j`: fubar foobar """) def test_unknown_section(): doc_text = """ Test having an unknown section Mope ---- This should be ignored and warned about """ class BadSection: """Class with bad section. Nope ---- This class has a nope section. """ pass with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) NumpyDocString(doc_text) assert len(w) == 1 assert "Unknown section Mope" == str(w[0].message) with warnings.catch_warnings(record=True) as w: warnings.filterwarnings('always', '', UserWarning) SphinxClassDoc(BadSection) assert len(w) == 1 assert('test_docscrape.test_unknown_section.<locals>.BadSection' in str(w[0].message) or 'test_docscrape.BadSection' in str(w[0].message)) doc7 = NumpyDocString(""" Doc starts on second line. 
""") def test_empty_first_line(): assert doc7['Summary'][0].startswith('Doc starts') def test_no_summary(): str(SphinxDocString(""" Parameters ----------""")) def test_unicode(): doc = SphinxDocString(""" öäöäöäöäöåååå öäöäöäööäååå Parameters ---------- ååå : äää ööö Returns ------- ååå : ööö äää """) assert isinstance(doc['Summary'][0], str) assert doc['Summary'][0] == 'öäöäöäöäöåååå' def test_plot_examples(): cfg = dict(use_plots=True) doc = SphinxDocString(""" Examples -------- >>> import matplotlib.pyplot as plt >>> plt.plot([1,2,3],[4,5,6]) >>> plt.show() """, config=cfg) assert 'plot::' in str(doc), str(doc) doc = SphinxDocString(""" Examples -------- >>> from matplotlib import pyplot as plt >>> plt.plot([1,2,3],[4,5,6]) >>> plt.show() """, config=cfg) assert 'plot::' in str(doc), str(doc) doc = SphinxDocString(""" Examples -------- .. plot:: import matplotlib.pyplot as plt plt.plot([1,2,3],[4,5,6]) plt.show() """, config=cfg) assert str(doc).count('plot::') == 1, str(doc) def test_use_blockquotes(): cfg = dict(use_blockquotes=True) doc = SphinxDocString(""" Parameters ---------- abc : def ghi jkl mno Returns ------- ABC : DEF GHI JKL MNO """, config=cfg) line_by_line_compare(str(doc), ''' :Parameters: **abc** : def ghi **jkl** mno :Returns: **ABC** : DEF GHI JKL MNO ''') def test_class_members(): class Dummy: """ Dummy class. """ def spam(self, a, b): """Spam\n\nSpam spam.""" pass def ham(self, c, d): """Cheese\n\nNo cheese.""" pass @property def spammity(self): """Spammity index""" return 0.95 class Ignorable: """local class, to be ignored""" pass for cls in (ClassDoc, SphinxClassDoc): doc = cls(Dummy, config=dict(show_class_members=False)) assert 'Methods' not in str(doc), (cls, str(doc)) assert 'spam' not in str(doc), (cls, str(doc)) assert 'ham' not in str(doc), (cls, str(doc)) assert 'spammity' not in str(doc), (cls, str(doc)) assert 'Spammity index' not in str(doc), (cls, str(doc)) doc = cls(Dummy, config=dict(show_class_members=True)) assert 'Methods' in str(doc), (cls, str(doc)) assert 'spam' in str(doc), (cls, str(doc)) assert 'ham' in str(doc), (cls, str(doc)) assert 'spammity' in str(doc), (cls, str(doc)) if cls is SphinxClassDoc: assert '.. autosummary::' in str(doc), str(doc) else: assert 'Spammity index' in str(doc), str(doc) class SubDummy(Dummy): """ Subclass of Dummy class. """ def ham(self, c, d): """Cheese\n\nNo cheese.\nOverloaded Dummy.ham""" pass def bar(self, a, b): """Bar\n\nNo bar""" pass for cls in (ClassDoc, SphinxClassDoc): doc = cls(SubDummy, config=dict(show_class_members=True, show_inherited_class_members=False)) assert 'Methods' in str(doc), (cls, str(doc)) assert 'spam' not in str(doc), (cls, str(doc)) assert 'ham' in str(doc), (cls, str(doc)) assert 'bar' in str(doc), (cls, str(doc)) assert 'spammity' not in str(doc), (cls, str(doc)) if cls is SphinxClassDoc: assert '.. autosummary::' in str(doc), str(doc) else: assert 'Spammity index' not in str(doc), str(doc) doc = cls(SubDummy, config=dict(show_class_members=True, show_inherited_class_members=True)) assert 'Methods' in str(doc), (cls, str(doc)) assert 'spam' in str(doc), (cls, str(doc)) assert 'ham' in str(doc), (cls, str(doc)) assert 'bar' in str(doc), (cls, str(doc)) assert 'spammity' in str(doc), (cls, str(doc)) if cls is SphinxClassDoc: assert '.. autosummary::' in str(doc), str(doc) else: assert 'Spammity index' in str(doc), str(doc) def test_duplicate_signature(): # Duplicate function signatures occur e.g. 
in ufuncs, when the # automatic mechanism adds one, and a more detailed comes from the # docstring itself. doc = NumpyDocString( """ z(x1, x2) z(a, theta) """) assert doc['Signature'].strip() == 'z(a, theta)' class_doc_txt = """ Foo Parameters ---------- f : callable ``f(t, y, *f_args)`` Aaa. jac : callable ``jac(t, y, *jac_args)`` Bbb. Attributes ---------- t : float Current time. y : ndarray Current variable values. * hello * world an_attribute : float The docstring is printed instead no_docstring : str But a description no_docstring2 : str multiline_sentence midword_period no_period Methods ------- a b c Examples -------- For usage examples, see `ode`. """ def test_class_members_doc(): doc = ClassDoc(None, class_doc_txt) line_by_line_compare(str(doc), """ Foo Parameters ---------- f : callable ``f(t, y, *f_args)`` Aaa. jac : callable ``jac(t, y, *jac_args)`` Bbb. Examples -------- For usage examples, see `ode`. Attributes ---------- t : float Current time. y : ndarray Current variable values. * hello * world an_attribute : float The docstring is printed instead no_docstring : str But a description no_docstring2 : str multiline_sentence midword_period no_period Methods ------- a b c """) def test_class_members_doc_sphinx(): class Foo: @property def an_attribute(self): """Test attribute""" return None @property def no_docstring(self): return None @property def no_docstring2(self): return None @property def multiline_sentence(self): """This is a sentence. It spans multiple lines.""" return None @property def midword_period(self): """The sentence for numpy.org.""" return None @property def no_period(self): """This does not have a period so we truncate its summary to the first linebreak Apparently. """ return None doc = SphinxClassDoc(Foo, class_doc_txt) line_by_line_compare(str(doc), """ Foo :Parameters: **f** : callable ``f(t, y, *f_args)`` Aaa. **jac** : callable ``jac(t, y, *jac_args)`` Bbb. .. rubric:: Examples For usage examples, see `ode`. :Attributes: **t** : float Current time. **y** : ndarray Current variable values. * hello * world :obj:`an_attribute <an_attribute>` : float Test attribute **no_docstring** : str But a description **no_docstring2** : str .. :obj:`multiline_sentence <multiline_sentence>` This is a sentence. :obj:`midword_period <midword_period>` The sentence for numpy.org. :obj:`no_period <no_period>` This does not have a period .. rubric:: Methods ===== ========== **a** **b** **c** ===== ========== """) def test_class_attributes_as_member_list(): class Foo: """ Class docstring. Attributes ---------- an_attribute Another description that is not used. """ @property def an_attribute(self): """Test attribute""" return None attr_doc = """:Attributes: :obj:`an_attribute <an_attribute>` Test attribute""" assert attr_doc in str(SphinxClassDoc(Foo)) assert "Another description" not in str(SphinxClassDoc(Foo)) attr_doc2 = """.. rubric:: Attributes .. autosummary:: :toctree: an_attribute""" cfg = dict(attributes_as_param_list=False) assert attr_doc2 in str(SphinxClassDoc(Foo, config=cfg)) assert "Another description" not in str(SphinxClassDoc(Foo, config=cfg)) def test_templated_sections(): doc = SphinxClassDoc(None, class_doc_txt, config={'template': jinja2.Template('{{examples}}\n{{parameters}}')}) line_by_line_compare(str(doc), """ .. rubric:: Examples For usage examples, see `ode`. :Parameters: **f** : callable ``f(t, y, *f_args)`` Aaa. **jac** : callable ``jac(t, y, *jac_args)`` Bbb. 
""") def test_nonstandard_property(): # test discovery of a property that does not satisfy isinstace(.., property) class SpecialProperty: def __init__(self, axis=0, doc=""): self.axis = axis self.__doc__ = doc def __get__(self, obj, type): if obj is None: # Only instances have actual _data, not classes return self else: return obj._data.axes[self.axis] def __set__(self, obj, value): obj._set_axis(self.axis, value) class Dummy: attr = SpecialProperty(doc="test attribute") doc = get_doc_object(Dummy) assert "test attribute" in str(doc) def test_args_and_kwargs(): cfg = dict() doc = SphinxDocString(""" Parameters ---------- param1 : int First parameter *args : tuple Arguments **kwargs : dict Keyword arguments """, config=cfg) line_by_line_compare(str(doc), r""" :Parameters: **param1** : int First parameter **\*args** : tuple Arguments **\*\*kwargs** : dict Keyword arguments """) def test_autoclass(): cfg=dict(show_class_members=True, show_inherited_class_members=True) doc = SphinxClassDoc(str, ''' A top section before .. autoclass:: str ''', config=cfg) line_by_line_compare(str(doc), r''' A top section before .. autoclass:: str .. rubric:: Methods ''', 5) xref_doc_txt = """ Test xref in Parameters, Other Parameters and Returns Parameters ---------- p1 : int Integer value p2 : float, optional Integer value Other Parameters ---------------- p3 : list[int] List of integers p4 : :class:`pandas.DataFrame` A dataframe p5 : sequence of `int` A sequence Returns ------- out : array Numerical return value """ xref_doc_txt_expected = r""" Test xref in Parameters, Other Parameters and Returns :Parameters: **p1** : :class:`python:int` Integer value **p2** : :class:`python:float`, optional Integer value :Returns: **out** : :obj:`array <numpy.ndarray>` Numerical return value :Other Parameters: **p3** : :class:`python:list`\[:class:`python:int`] List of integers **p4** : :class:`pandas.DataFrame` A dataframe **p5** : :obj:`python:sequence` of `int` A sequence """ def test_xref(): xref_aliases = { 'sequence': ':obj:`python:sequence`', } class Config(): def __init__(self, a, b): self.numpydoc_xref_aliases = a self.numpydoc_xref_aliases_complete = b xref_aliases_complete = deepcopy(DEFAULT_LINKS) for key in xref_aliases: xref_aliases_complete[key] = xref_aliases[key] config = Config(xref_aliases, xref_aliases_complete) app = namedtuple('config', 'config')(config) update_config(app) xref_ignore = {'of', 'default', 'optional'} doc = SphinxDocString( xref_doc_txt, config=dict( xref_param_type=True, xref_aliases=xref_aliases_complete, xref_ignore=xref_ignore ) ) line_by_line_compare(str(doc), xref_doc_txt_expected) if __name__ == "__main__": import pytest pytest.main()
def test_sent():
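# A minimal, self-contained sketch of the parsing interface the tests above
# exercise: NumpyDocString splits a numpydoc-style docstring into named
# sections, with each parameter-like entry exposed as a (name, type, desc)
# triple.
from numpydoc.docscrape import NumpyDocString

_sketch = NumpyDocString(
    """
    Add two numbers.

    Parameters
    ----------
    a : int
        First addend.
    b : int
        Second addend.

    Returns
    -------
    int
        The sum of `a` and `b`.
    """
)

assert [name for name, _, _ in _sketch["Parameters"]] == ["a", "b"]
assert _sketch["Returns"][0].type == "int"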
app.rs
#[cfg(test)] #[path = "test.rs"] mod test; use super::{Opt, Output}; use crate::gen::include::Include; use crate::syntax::IncludeKind; use clap::{App, AppSettings, Arg}; use std::ffi::OsStr; use std::path::PathBuf; const USAGE: &str = "\ cxxbridge <input>.rs Emit .cc file for bridge to stdout cxxbridge <input>.rs --header Emit .h file for bridge to stdout cxxbridge --header Emit \"rust/cxx.h\" header to stdout\ "; const TEMPLATE: &str = "\ {bin} {version} David Tolnay <[email protected]> https://github.com/dtolnay/cxx USAGE: {usage} ARGS: {positionals} OPTIONS: {options}\ "; fn app() -> App<'static> { let mut app = App::new("cxxbridge") .override_usage(USAGE) .help_template(TEMPLATE) .setting(AppSettings::NextLineHelp) .arg(arg_input()) .arg(arg_cxx_impl_annotations()) .arg(arg_header()) .arg(arg_include()) .arg(arg_output()) .mut_arg("help", |a| a.help("Print help information.")); if let Some(version) = option_env!("CARGO_PKG_VERSION") { app = app .version(version) .mut_arg("version", |a| a.help("Print version information.")); } app } const INPUT: &str = "input"; const CXX_IMPL_ANNOTATIONS: &str = "cxx-impl-annotations"; const HEADER: &str = "header"; const INCLUDE: &str = "include"; const OUTPUT: &str = "output"; pub(super) fn from_args() -> Opt { let matches = app().get_matches(); let input = matches.value_of_os(INPUT).map(PathBuf::from); let cxx_impl_annotations = matches.value_of(CXX_IMPL_ANNOTATIONS).map(str::to_owned); let header = matches.is_present(HEADER); let include = matches .values_of(INCLUDE) .unwrap_or_default() .map(|include| { if include.starts_with('<') && include.ends_with('>') { Include { path: include[1..include.len() - 1].to_owned(), kind: IncludeKind::Bracketed, } } else { Include { path: include.to_owned(), kind: IncludeKind::Quoted, } } }) .collect(); let mut outputs = Vec::new(); for path in matches.values_of_os(OUTPUT).unwrap_or_default() { outputs.push(if path == "-" { Output::Stdout } else { Output::File(PathBuf::from(path)) }); } if outputs.is_empty() { outputs.push(Output::Stdout); } Opt { input, header, cxx_impl_annotations, include, outputs, } } fn validate_utf8(arg: &OsStr) -> Result<(), &'static str> { if arg.to_str().is_some() { Ok(()) } else { Err("invalid utf-8 sequence") } } fn arg_input() -> Arg<'static> { Arg::new(INPUT) .help("Input Rust source file containing #[cxx::bridge].") .required_unless_present(HEADER) .allow_invalid_utf8(true) } fn arg_cxx_impl_annotations() -> Arg<'static> { const HELP: &str = "\ Optional annotation for implementations of C++ function wrappers that may be exposed to Rust. You may for example need to provide __declspec(dllexport) or __attribute__((visibility(\"default\"))) if Rust code from one shared object or executable depends on these C++ functions in another."; Arg::new(CXX_IMPL_ANNOTATIONS) .long(CXX_IMPL_ANNOTATIONS) .takes_value(true) .value_name("annotation") .allow_invalid_utf8(true) .validator_os(validate_utf8) .help(HELP) } fn
() -> Arg<'static> { const HELP: &str = "\ Emit header with declarations only. Optional if using `-o` with a path ending in `.h`."; Arg::new(HEADER).long(HEADER).help(HELP) } fn arg_include() -> Arg<'static> { const HELP: &str = "\ Any additional headers to #include. The cxxbridge tool does not parse or even require the given paths to exist; they simply go into the generated C++ code as #include lines."; Arg::new(INCLUDE) .long(INCLUDE) .short('i') .takes_value(true) .multiple_occurrences(true) .allow_invalid_utf8(true) .validator_os(validate_utf8) .help(HELP) } fn arg_output() -> Arg<'static> { const HELP: &str = "\ Path of file to write as output. Output goes to stdout if -o is not specified."; Arg::new(OUTPUT) .long(OUTPUT) .short('o') .takes_value(true) .multiple_occurrences(true) .allow_invalid_utf8(true) .validator_os(validate_utf8) .help(HELP) }
arg_header
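# A minimal invocation sketch for the command-line interface defined above,
# assuming the `cxxbridge` binary is on PATH and a hypothetical src/lib.rs
# contains a #[cxx::bridge] module; the positional input, --header, and -o
# flags all come from the arg definitions in that file.
import subprocess

# Emit the generated .cc to stdout, and the matching .h to a file via -o.
cc = subprocess.run(["cxxbridge", "src/lib.rs"], capture_output=True, text=True)
subprocess.run(["cxxbridge", "src/lib.rs", "--header", "-o", "lib.rs.h"])
print(cc.stdout[:200])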
intenclr.rs
#[doc = "Register `INTENCLR` reader"] pub struct R(crate::R<INTENCLR_SPEC>); impl core::ops::Deref for R { type Target = crate::R<INTENCLR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<INTENCLR_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<INTENCLR_SPEC>) -> Self { R(reader) } } #[doc = "Register `INTENCLR` writer"] pub struct W(crate::W<INTENCLR_SPEC>); impl core::ops::Deref for W { type Target = crate::W<INTENCLR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<INTENCLR_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<INTENCLR_SPEC>) -> Self { W(writer) } } #[doc = "Disable interrupt on COMPARE\\[0\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE0_A { #[doc = "0: Interrupt disabled."] DISABLED = 0, #[doc = "1: Interrupt enabled."] ENABLED = 1, } impl From<COMPARE0_A> for bool { #[inline(always)] fn from(variant: COMPARE0_A) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE0` reader - Disable interrupt on COMPARE\\[0\\]"] pub struct COMPARE0_R(crate::FieldReader<bool, COMPARE0_A>); impl COMPARE0_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { COMPARE0_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> COMPARE0_A { match self.bits { false => COMPARE0_A::DISABLED, true => COMPARE0_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { **self == COMPARE0_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { **self == COMPARE0_A::ENABLED } } impl core::ops::Deref for COMPARE0_R { type Target = crate::FieldReader<bool, COMPARE0_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Disable interrupt on COMPARE\\[0\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE0_AW { #[doc = "1: Disable interrupt on write."] CLEAR = 1, } impl From<COMPARE0_AW> for bool { #[inline(always)] fn from(variant: COMPARE0_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE0` writer - Disable interrupt on COMPARE\\[0\\]"] pub struct COMPARE0_W<'a> { w: &'a mut W, } impl<'a> COMPARE0_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: COMPARE0_AW) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable interrupt on write."] #[inline(always)] pub fn clear(self) -> &'a mut W { self.variant(COMPARE0_AW::CLEAR) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16); self.w } } #[doc = "Disable interrupt on COMPARE\\[1\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE1_A { #[doc = "0: Interrupt disabled."] DISABLED = 0, #[doc = "1: Interrupt enabled."] ENABLED = 1, } impl From<COMPARE1_A> for bool { #[inline(always)] fn from(variant: COMPARE1_A) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE1` reader - Disable interrupt on COMPARE\\[1\\]"] pub struct 
COMPARE1_R(crate::FieldReader<bool, COMPARE1_A>); impl COMPARE1_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { COMPARE1_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> COMPARE1_A { match self.bits { false => COMPARE1_A::DISABLED, true => COMPARE1_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { **self == COMPARE1_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { **self == COMPARE1_A::ENABLED } } impl core::ops::Deref for COMPARE1_R { type Target = crate::FieldReader<bool, COMPARE1_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Disable interrupt on COMPARE\\[1\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE1_AW { #[doc = "1: Disable interrupt on write."] CLEAR = 1, } impl From<COMPARE1_AW> for bool { #[inline(always)] fn from(variant: COMPARE1_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE1` writer - Disable interrupt on COMPARE\\[1\\]"] pub struct COMPARE1_W<'a> { w: &'a mut W, } impl<'a> COMPARE1_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: COMPARE1_AW) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable interrupt on write."] #[inline(always)] pub fn clear(self) -> &'a mut W { self.variant(COMPARE1_AW::CLEAR) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | ((value as u32 & 0x01) << 17); self.w } } #[doc = "Disable interrupt on COMPARE\\[2\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE2_A { #[doc = "0: Interrupt disabled."] DISABLED = 0, #[doc = "1: Interrupt enabled."] ENABLED = 1, } impl From<COMPARE2_A> for bool { #[inline(always)] fn from(variant: COMPARE2_A) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE2` reader - Disable interrupt on COMPARE\\[2\\]"] pub struct COMPARE2_R(crate::FieldReader<bool, COMPARE2_A>); impl COMPARE2_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { COMPARE2_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> COMPARE2_A { match self.bits { false => COMPARE2_A::DISABLED, true => COMPARE2_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { **self == COMPARE2_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { **self == COMPARE2_A::ENABLED } } impl core::ops::Deref for COMPARE2_R { type Target = crate::FieldReader<bool, COMPARE2_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Disable interrupt on COMPARE\\[2\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE2_AW { #[doc = "1: Disable interrupt on write."] CLEAR = 1, } impl From<COMPARE2_AW> for bool { #[inline(always)] fn from(variant: COMPARE2_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE2` writer - Disable interrupt on COMPARE\\[2\\]"] pub struct COMPARE2_W<'a> { w: &'a mut W, } 
impl<'a> COMPARE2_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: COMPARE2_AW) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable interrupt on write."] #[inline(always)] pub fn clear(self) -> &'a mut W { self.variant(COMPARE2_AW::CLEAR) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | ((value as u32 & 0x01) << 18); self.w } } #[doc = "Disable interrupt on COMPARE\\[3\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE3_A { #[doc = "0: Interrupt disabled."] DISABLED = 0, #[doc = "1: Interrupt enabled."] ENABLED = 1, } impl From<COMPARE3_A> for bool { #[inline(always)] fn from(variant: COMPARE3_A) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE3` reader - Disable interrupt on COMPARE\\[3\\]"] pub struct COMPARE3_R(crate::FieldReader<bool, COMPARE3_A>); impl COMPARE3_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { COMPARE3_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> COMPARE3_A { match self.bits { false => COMPARE3_A::DISABLED, true => COMPARE3_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { **self == COMPARE3_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { **self == COMPARE3_A::ENABLED } } impl core::ops::Deref for COMPARE3_R { type Target = crate::FieldReader<bool, COMPARE3_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Disable interrupt on COMPARE\\[3\\]\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum COMPARE3_AW { #[doc = "1: Disable interrupt on write."] CLEAR = 1, } impl From<COMPARE3_AW> for bool { #[inline(always)] fn from(variant: COMPARE3_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `COMPARE3` writer - Disable interrupt on COMPARE\\[3\\]"] pub struct COMPARE3_W<'a> { w: &'a mut W, } impl<'a> COMPARE3_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: COMPARE3_AW) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable interrupt on write."] #[inline(always)] pub fn clear(self) -> &'a mut W { self.variant(COMPARE3_AW::CLEAR) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | ((value as u32 & 0x01) << 19); self.w } } impl R { #[doc = "Bit 16 - Disable interrupt on COMPARE\\[0\\]"] #[inline(always)] pub fn compare0(&self) -> COMPARE0_R { COMPARE0_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - Disable interrupt on COMPARE\\[1\\]"] #[inline(always)] pub fn compare1(&self) -> COMPARE1_R { COMPARE1_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 18 - Disable interrupt on COMPARE\\[2\\]"] #[inline(always)] pub fn compare2(&self) -> COMPARE2_R { COMPARE2_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - 
Disable interrupt on COMPARE\\[3\\]"] #[inline(always)] pub fn compare3(&self) -> COMPARE3_R { COMPARE3_R::new(((self.bits >> 19) & 0x01) != 0) } } impl W { #[doc = "Bit 16 - Disable interrupt on COMPARE\\[0\\]"] #[inline(always)] pub fn compare0(&mut self) -> COMPARE0_W { COMPARE0_W { w: self } } #[doc = "Bit 17 - Disable interrupt on COMPARE\\[1\\]"] #[inline(always)] pub fn compare1(&mut self) -> COMPARE1_W { COMPARE1_W { w: self } } #[doc = "Bit 18 - Disable interrupt on COMPARE\\[2\\]"] #[inline(always)] pub fn compare2(&mut self) -> COMPARE2_W { COMPARE2_W { w: self } } #[doc = "Bit 19 - Disable interrupt on COMPARE\\[3\\]"] #[inline(always)] pub fn compare3(&mut self) -> COMPARE3_W { COMPARE3_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn
(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Interrupt enable clear register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intenclr](index.html) module"] pub struct INTENCLR_SPEC; impl crate::RegisterSpec for INTENCLR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [intenclr::R](R) reader structure"] impl crate::Readable for INTENCLR_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [intenclr::W](W) writer structure"] impl crate::Writable for INTENCLR_SPEC { type Writer = W; } #[doc = "`reset()` method sets INTENCLR to value 0"] impl crate::Resettable for INTENCLR_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
bits
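A brief, hypothetical usage sketch of the generated writer API documented above. The `rtc0` peripheral handle and the owning device crate are assumptions for illustration, not part of this file:

// Hypothetical call site; assumes a svd2rust-style peripheral handle `rtc0`
// whose INTENCLR register is the one defined above. Each field writer method
// returns `&mut W`, so several field updates can be chained in one `write`.
// This disables the COMPARE[0] and COMPARE[3] interrupts (write-1-to-clear).
rtc0.intenclr.write(|w| w.compare0().clear().compare3().clear());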
FiChevronRight.js
// THIS FILE IS AUTO GENERATED
var GenIcon = require('../lib').GenIcon
module.exports.FiChevronRight = function FiChevronRight (props) {
  return GenIcon({"tag":"svg","attr":{"viewBox":"0 0 24 24","fill":"none","stroke":"currentColor","strokeWidth":"2","strokeLinecap":"round","strokeLinejoin":"round"},"child":[{"tag":"polyline","attr":{"points":"9 18 15 12 9 6"}}]})(props);
};
client.py
""" Copyright 2016-present Nike, Inc. Licensed under the Apache License, Version 2.0 (the "License"); You may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and* limitations under the License.* """ # For python 2.7 from __future__ import print_function import requests from .aws_auth import AWSAuth from .user_auth import UserAuth from . import CerberusClientException, CLIENT_VERSION from .util import throw_if_bad_response, get_with_retry, post_with_retry, put_with_retry, delete_with_retry, head_with_retry import ast import json import logging import sys import warnings import os logger = logging.getLogger(__name__) class CerberusClient(object): """ Cerberus Python Client for interacting with Cerberus APIs. Authentication is done via the Auth Classes""" HEADERS = {'Content-Type': 'application/json'} def __init__(self, cerberus_url, username=None, password=None, region='us-west-2', token=None, aws_session=None, verbose=None): """ Username and password are optional, they are not needed for IAM Role Auth. If aws_session is set with a botocore.session.Session object, the Cerberus client will sign the request using the session provided instead of the default session. verbose (default True) controls if the cerberus library will output some debuging statements to the console (sys.stderr). """ self.cerberus_url = cerberus_url self.username = username or "" self.password = password or "" self.region = region self.token = token if verbose is None or type(verbose) != bool: self.verbose = True else: self.verbose = verbose self.aws_session = aws_session if self.token is None: self._set_token() self.HEADERS['X-Cerberus-Token'] = self.token self.HEADERS['X-Cerberus-Client'] = 'CerberusPythonClient/' + CLIENT_VERSION def _add_slash(self, string=None): """ if a string doesn't end in a '/' add one """ if(not str.endswith(string, '/')): return str.join('', [string, '/']) return str(string) def _set_token(self): """Set the Cerberus token based on auth type""" try: self.token = os.environ['CERBERUS_TOKEN'] if self.verbose: print("Overriding Cerberus token with environment variable.", file=sys.stderr) logger.info("Overriding Cerberus token with environment variable.") return except: pass if self.username: ua = UserAuth(self.cerberus_url, self.username, self.password) self.token = ua.get_token() else: awsa = AWSAuth(self.cerberus_url, region=self.region, aws_session=self.aws_session, verbose=self.verbose) self.token = awsa.get_token() def get_token(self): """Return a client token from Cerberus""" return self.token def get_roles(self): """Return all the roles (IAM or User Groups) that can be granted to a safe deposit box. Roles are permission levels that are granted to IAM or User Groups. 
Associating the id for the write role would allow that IAM or User Group to write in the safe deposit box.""" roles_resp = get_with_retry(self.cerberus_url + '/v1/role', headers=self.HEADERS) throw_if_bad_response(roles_resp) return roles_resp.json() def get_role(self, key): """Return id of named role.""" json_resp = self.get_roles() for item in json_resp: if key in item["name"]: return item["id"] raise CerberusClientException("Key '%s' not found" % key) def list_roles(self): """Simplified version of get_roles that returns a dict of just name: id for the roles""" json_resp = self.get_roles() temp_dict = {} for item in json_resp: temp_dict[item["name"]] = item["id"] return temp_dict def get_categories(self): """ Return a list of categories that a safe deposit box can belong to""" sdb_resp = get_with_retry(self.cerberus_url + '/v1/category', headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() def create_sdb(self, name, category_id, owner, description="", user_group_permissions=None, iam_principal_permissions=None): """Create a safe deposit box. You need to refresh your token before the iam role is granted permission to the new safe deposit box. Keyword arguments: name (string) -- name of the safe deposit box category_id (string) -- category id that determines where to store the sdb. (ex: shared, applications) owner (string) -- AD group that owns the safe deposit box description (string) -- Description of the safe deposit box user_group_permissions (list) -- list of dictionaries containing the key name and maybe role_id iam_principal_permissions (list) -- list of dictionaries containing the key name iam_principal_arn and role_id """ # Do some sanity checking if user_group_permissions is None: user_group_permissions = [] if iam_principal_permissions is None: iam_principal_permissions = [] if list != type(user_group_permissions): raise(TypeError('Expected list, but got ' + str(type(user_group_permissions)))) if list != type(iam_principal_permissions): raise(TypeError('Expected list, but got ' + str(type(iam_principal_permissions)))) temp_data = { "name": name, "description": description, "category_id": category_id, "owner": owner, } if len(user_group_permissions) > 0: temp_data["user_group_permissions"] = user_group_permissions if len(iam_principal_permissions) > 0: temp_data["iam_principal_permissions"] = iam_principal_permissions data = json.encoder.JSONEncoder().encode(temp_data) sdb_resp = post_with_retry(self.cerberus_url + '/v2/safe-deposit-box', data=str(data), headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() def delete_sdb(self, sdb_id): """ Delete a safe deposit box specified by id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path.""" sdb_resp = delete_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp def get_sdbs(self): """ Return a list of each SDB the client is authorized to view""" sdb_resp = get_with_retry(self.cerberus_url + '/v2/safe-deposit-box', headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() def get_sdb_path(self, sdb): """Return the path for a SDB""" sdb_id = self.get_sdb_id(sdb) sdb_resp = get_with_retry( self.cerberus_url + '/v1/safe-deposit-box/' + sdb_id + '/', headers=self.HEADERS ) throw_if_bad_response(sdb_resp) return sdb_resp.json()['path'] def get_sdb_keys(self, path): """Return the keys for a SDB, which are need for the full secure data path""" list_resp = 
get_with_retry( self.cerberus_url + '/v1/secret/' + path + '/?list=true', headers=self.HEADERS ) throw_if_bad_response(list_resp) return list_resp.json()['data']['keys'] def get_sdb_id(self, sdb): """ Return the ID for the given safe deposit box. Keyword arguments: sdb -- This is the name of the safe deposit box, not the path""" json_resp = self.get_sdbs() for r in json_resp: if r['name'] == sdb: return str(r['id']) # If we haven't returned yet then we didn't find what we were # looking for. raise CerberusClientException("'%s' not found" % sdb) def get_sdb_id_by_path(self, sdb_path): """ Given the path, return the ID for the given safe deposit box.""" json_resp = self.get_sdbs() # Deal with the supplied path possibly missing an ending slash path = self._add_slash(sdb_path) for r in json_resp: if r['path'] == path: return str(r['id']) # If we haven't returned yet then we didn't find what we were # looking for. raise CerberusClientException("'%s' not found" % sdb_path) def get_sdb_by_id(self, sdb_id): """ Return the details for the given safe deposit box id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path. """ sdb_resp = get_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() def get_sdb_by_path(self, sdb_path): """ Return the details for the given safe deposit box path. Keyword arguments: sdb_path -- this is the path for the given safe deposit box. ex: ('shared/my-test-box') """ return self.get_sdb_by_id(self.get_sdb_id_by_path(sdb_path)) def get_sdb_by_name(self, sdb_name): """ Return the details for the given safe deposit box name. Keyword arguments: sdb_name -- this is the name for the given safe deposit box. ex: ('My Test Box') """ return self.get_sdb_by_id(self.get_sdb_id(sdb_name)) def get_sdb_secret_version_paths(self, sdb_id): """ Get SDB secret version paths. This function takes the sdb_id """ sdb_resp = get_with_retry(str.join('', [self.cerberus_url, '/v1/sdb-secret-version-paths/', sdb_id]), headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() def get_sdb_secret_version_paths_by_path(self, sdb_path): """ Get SDB secret version paths. This function takes the sdb_path """ return self.get_sdb_secret_version_paths(self.get_sdb_id_by_path(sdb_path)) def list_sdbs(self): """ Return sdbs by Name """ sdb_raw = self.get_sdbs() sdbs = [] for s in sdb_raw: sdbs.append(s['name']) return sdbs def update_sdb(self, sdb_id, owner=None, description=None, user_group_permissions=None, iam_principal_permissions=None): """ Update a safe deposit box. 
Keyword arguments: owner (string) -- AD group that owns the safe deposit box description (string) -- Description of the safe deposit box user_group_permissions (list) -- list of dictionaries containing the key name and maybe role_id iam_principal_permissions (list) -- list of dictionaries containing the key name iam_principal_arn and role_id """ # Grab current data old_data = self.get_sdb_by_id(sdb_id) # Assemble information to update temp_data = {} keys = ('owner', 'description', 'iam_principal_permissions', 'user_group_permissions') for k in keys: if k in old_data: temp_data[k] = old_data[k] if owner is not None: temp_data["owner"] = owner if description is not None: temp_data["description"] = description if user_group_permissions is not None and len(user_group_permissions) > 0: temp_data["user_group_permissions"] = user_group_permissions if iam_principal_permissions is not None and len(iam_principal_permissions) > 0: temp_data["iam_principal_permissions"] = iam_principal_permissions data = json.encoder.JSONEncoder().encode(temp_data) sdb_resp = put_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, data=str(data), headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json() ###------ Files ------### def delete_file(self, secure_data_path): """Delete a file at the given secure data path""" secret_resp = delete_with_retry(self.cerberus_url + '/v1/secure-file/' + secure_data_path, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp def get_file_metadata(self, secure_data_path, version=None): """Get just the metadata for a file, not the content""" if not version: version = "CURRENT" payload = {'versionId': str(version)} secret_resp = head_with_retry(str.join('', [self.cerberus_url, '/v1/secure-file/', secure_data_path]), params=payload, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp.headers def _get_file(self, secure_data_path, version=None): """ Return the file stored at the secure data path Keyword arguments: secure_data_path (string) -- full path in the secret deposit box that contains the key /shared/sdb-path/secret """ if not version: version = "CURRENT" payload = {'versionId': str(version)} secret_resp = get_with_retry(str.join('', [self.cerberus_url, '/v1/secure-file/', secure_data_path]), params=payload, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp def _parse_metadata_filename(self, metadata):
def get_file(self, secure_data_path, version=None): """ Return a requests.structures.CaseInsensitiveDict object containing a file and the metadata/header information around it. The binary data of the file is under the key 'data' """ query = self._get_file(secure_data_path, version) resp = query.headers.copy() resp = self._parse_metadata_filename(resp) resp['data'] = query.content return resp def get_file_data(self, secure_data_path, version=None): """ Return the data of a file stored at the secure data path This only returns the file data, and does not include any of the meta information stored with it. Keyword arguments: secure_data_path (string) -- full path in the secret deposit box that contains the file key """ return self._get_file(secure_data_path, version).content def get_file_versions(self, secure_data_path, limit=None, offset=None): """ Get versions of a particular file This is just a shim to get_secret_versions secure_data_path -- full path to the file in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. offset -- Default(0), used for pagination. Will request records from the given offset. """ return self.get_secret_versions(secure_data_path, limit, offset) def _get_all_file_version_ids(self, secure_data_path, limit=None): """ Convenience function that returns a generator that will paginate over the file version ids secure_data_path -- full path to the file in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. """ offset = 0 # Prime the versions dictionary so that all the logic can happen in the loop versions = {'has_next': True, 'next_offset': 0} while (versions['has_next']): offset = versions['next_offset'] versions = self.get_file_versions(secure_data_path, limit, offset) for summary in versions['secure_data_version_summaries']: yield summary def _get_all_file_versions(self, secure_data_path, limit=None): """ Convenience function that returns a generator yielding the contents of all versions of a file and its version info secure_data_path -- full path to the file in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. """ for secret in self._get_all_file_version_ids(secure_data_path, limit): yield {'secret': self.get_file_data(secure_data_path, version=secret['id']), 'version': secret} def list_files(self, secure_data_path, limit=None, offset=None): """Return the list of files in the path. May need to be paginated""" # Make sure that limit and offset are in range. # Set the normal defaults if not limit or limit <= 0: limit = 100 if not offset or offset < 0: offset = 0 payload = {'limit': str(limit), 'offset': str(offset)} # Because of the addition of versionId and the way URLs are constructed, secure_data_path should # always end in a '/'. secure_data_path = self._add_slash(secure_data_path) secret_resp = get_with_retry(self.cerberus_url + '/v1/secure-files/' + secure_data_path, params=payload, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp.json() def put_file(self, secure_data_path, filehandle, content_type=None): """ Upload a file to a secure data path provided Keyword arguments: secure_data_path -- full path in the safety deposit box that contains the file key to store things under filehandle -- Pass an opened filehandle to the file you want to upload. Make sure that the file was opened in binary mode, otherwise the size calculations can be off for text files. 
content_type -- Optional. Set the Mime type of the file you're uploading. """ # Parse out the filename from the path filename = secure_data_path.rsplit('/', 1) if content_type: data = {'file-content': (filename, filehandle, content_type)} else: data = {'file-content': (filename, filehandle)} headers = self.HEADERS.copy() if 'Content-Type' in headers: headers.__delitem__('Content-Type') secret_resp = post_with_retry(self.cerberus_url + '/v1/secure-file/' + secure_data_path, files=data, headers=headers) throw_if_bad_response(secret_resp) return secret_resp ###------ Secrets -----#### def delete_secret(self, secure_data_path): """Delete a secret from the given secure data path""" secret_resp = delete_with_retry(self.cerberus_url + '/v1/secret/' + secure_data_path, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp def get_secret(self, secure_data_path, key, version=None): """ (Deprecated)Return the secret based on the secure data path and key This method is deprecated because it misleads users into thinking they're only getting one value from Cerberus when in reality they're getting all values, from which a single value is returned. Use get_secrets_data(secure_data_path)[key] instead. (See https://github.com/Nike-Inc/cerberus-python-client/issues/18) """ warnings.warn( "get_secret is deprecated, use get_secrets_data instead", DeprecationWarning ) secret_resp_json = self._get_secrets(secure_data_path, version) if key in secret_resp_json['data']: return secret_resp_json['data'][key] else: raise CerberusClientException("Key '%s' not found" % key) def _get_secrets(self, secure_data_path, version=None): """ Return full json secrets based on the secure data path Keyword arguments: secure_data_path (string) -- full path in the secret deposit box that contains the key /shared/sdb-path/secret """ if not version: version = "CURRENT" payload = {'versionId': str(version)} secret_resp = get_with_retry(str.join('', [self.cerberus_url, '/v1/secret/', secure_data_path]), params=payload, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp.json() def get_secrets(self, secure_data_path, version=None): """(Deprecated)Return json secrets based on the secure data path This method is deprecated because an addition step of reading value with ['data'] key from the returned data is required to get secrets, which contradicts the method name. Use get_secrets_data(secure_data_path) instead. (See https://github.com/Nike-Inc/cerberus-python-client/issues/19) """ warnings.warn( "get_secrets is deprecated, use get_secrets_data instead", DeprecationWarning ) return self._get_secrets(secure_data_path, version) def get_secrets_data(self, secure_data_path, version=None): """Return json secrets based on the secure data path Keyword arguments: secure_data_path (string) -- full path in the secret deposit box that contains the key """ return self._get_secrets(secure_data_path, version)['data'] def get_secret_versions(self, secure_data_path, limit=None, offset=None): """ Get versions of a particular secret key secure_data_path -- full path to the key in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. offset -- Default(0), used for pagination. Will request records from the given offset. """ # Make sure that limit and offset are in range. 
# Set the normal defaults if not limit or limit <= 0: limit = 100 if not offset or offset < 0: offset = 0 payload = {'limit': str(limit), 'offset': str(offset)} secret_resp = get_with_retry(str.join('', [self.cerberus_url, '/v1/secret-versions/', secure_data_path]), params=payload, headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp.json() def _get_all_secret_version_ids(self, secure_data_path, limit=None): """ Convenience function that returns a generator that will paginate over the secret version ids secure_data_path -- full path to the key in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. """ offset = 0 # Prime the versions dictionary so that all the logic can happen in the loop versions = {'has_next': True, 'next_offset': 0} while (versions['has_next']): offset = versions['next_offset'] versions = self.get_secret_versions(secure_data_path, limit, offset) for summary in versions['secure_data_version_summaries']: yield summary def _get_all_secret_versions(self, secure_data_path, limit=None): """ Convenience function that returns a generator yielding the contents of secrets and their version info secure_data_path -- full path to the key in the safety deposit box limit -- Default(100), limits how many records to be returned from the api at once. """ for secret in self._get_all_secret_version_ids(secure_data_path, limit): yield {'secret': self.get_secrets_data(secure_data_path, version=secret['id']), 'version': secret} def list_secrets(self, secure_data_path): """Return json secrets based on the secure_data_path, this will list keys in a folder""" # Because of the addition of versionId and the way URLs are constructed, secure_data_path should # always end in a '/'. secure_data_path = self._add_slash(secure_data_path) secret_resp = get_with_retry(self.cerberus_url + '/v1/secret/' + secure_data_path + '?list=true', headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp.json() def put_secret(self, secure_data_path, secret, merge=True): """Write secret(s) to a secure data path provided a dictionary of key/values Keyword arguments: secure_data_path -- full path in the safety deposit box that contains the key secret -- A dictionary containing key/values to be written at the secure data path merge -- Boolean that determines if the provided secret keys should be merged with the values already present at the secure data path. If False the keys will completely overwrite what was stored at the secure data path. (default True) """ # json encode the input. Cerberus is sensitive to double vs single quotes. # an added bonus is that json encoding transforms python2 unicode strings # into a compatible format. data = json.encoder.JSONEncoder().encode(secret) if merge: data = self.secret_merge(secure_data_path, secret) secret_resp = post_with_retry(self.cerberus_url + '/v1/secret/' + secure_data_path, data=str(data), headers=self.HEADERS) throw_if_bad_response(secret_resp) return secret_resp def secret_merge(self, secure_data_path, key): """Compare key/values at secure_data_path and merges them. 
New values will overwrite old.""" get_resp = get_with_retry(self.cerberus_url + '/v1/secret/' + secure_data_path, headers=self.HEADERS) temp_key = {} # Ignore a return of 404 since it means the key might not exist if get_resp.status_code == requests.codes.bad and get_resp.status_code not in [403, 404]: throw_if_bad_response(get_resp) elif get_resp.status_code in [403, 404]: temp_key = {} else: temp_key = get_resp.json()['data'] # Allow key to be either a string describing a dict or a dict. if type(key) == str: temp_key.update(ast.literal_eval(key)) else: temp_key.update(key) # This is a bit of a hack to get around python 2 treating unicode strings # differently. Cerberus will throw a 400 if we try to post python 2 style # unicode stings as the payload. combined_key = json.encoder.JSONEncoder().encode(temp_key) return combined_key
""" Parse the header metadata to pull out the filename and then store it under the key 'filename' """ index = metadata['Content-Disposition'].index('=')+1 metadata['filename'] = metadata['Content-Disposition'][index:].replace('"', '') return metadata
bash.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import signal
from subprocess import PIPE, STDOUT, Popen
from tempfile import TemporaryDirectory, gettempdir
from typing import Dict, Optional

from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.operator_helpers import context_to_airflow_vars


class BashOperator(BaseOperator):
    """
    Execute a Bash script, command or set of commands.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BashOperator`

    If BaseOperator.do_xcom_push is True, the last line written to stdout
    will also be pushed to an XCom when the bash command completes

    :param bash_command: The command, set of commands or reference to a
        bash script (must be '.sh') to be executed. (templated)
    :type bash_command: str
    :param env: If env is not None, it must be a mapping that defines the
        environment variables for the new process; these are used instead
        of inheriting the current process environment, which is the default
        behavior. (templated)
    :type env: dict
    :param output_encoding: Output encoding of bash command
    :type output_encoding: str

    On execution of this operator, the task will be up for retry when an
    exception is raised. However, if a sub-command exits with a non-zero
    value, Airflow will not recognize it as a failure unless the whole
    shell exits with a failure. The easiest way of achieving this is to
    prefix the command with ``set -e;``

    Example:

    .. code-block:: python

        bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"
    """
    template_fields = ('bash_command', 'env')
    template_ext = ('.sh', '.bash',)
    ui_color = '#f0ede4'

    @apply_defaults
    def __init__(
            self,
            bash_command: str,
            env: Optional[Dict[str, str]] = None,
            output_encoding: str = 'utf-8',
            *args, **kwargs) -> None:

        super().__init__(*args, **kwargs)
        self.bash_command = bash_command
        self.env = env
        self.output_encoding = output_encoding
        if kwargs.get('xcom_push') is not None:
            raise AirflowException("'xcom_push' was deprecated, use 'BaseOperator.do_xcom_push' instead")
        self.sub_process = None

    def execute(self, context):
        """
        Execute the bash command in a temporary directory
        which will be cleaned afterwards
        """
        self.log.info('Tmp dir root location: \n %s', gettempdir())

        # Prepare env for child process.
        env = self.env
        if env is None:
            env = os.environ.copy()
        airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True)
        self.log.debug('Exporting the following env vars:\n%s',
                       '\n'.join(["{}={}".format(k, v)
                                  for k, v in airflow_context_vars.items()]))
        env.update(airflow_context_vars)

        with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:

            def pre_exec():
                # Restore default signal disposition and invoke setsid
            self.log.info('Running command: %s', self.bash_command)

            self.sub_process = Popen(  # pylint: disable=subprocess-popen-preexec-fn
                ['bash', "-c", self.bash_command],
                stdout=PIPE,
                stderr=STDOUT,
                cwd=tmp_dir,
                env=env,
                preexec_fn=pre_exec)

            self.log.info('Output:')
            line = ''
            for raw_line in iter(self.sub_process.stdout.readline, b''):
                line = raw_line.decode(self.output_encoding).rstrip()
                self.log.info("%s", line)

            self.sub_process.wait()

            self.log.info('Command exited with return code %s', self.sub_process.returncode)

            if self.sub_process.returncode != 0:
                raise AirflowException('Bash command failed. The command returned a non-zero exit code.')

        return line

    def on_kill(self):
        self.log.info('Sending SIGTERM signal to bash process group')
        if self.sub_process and hasattr(self.sub_process, 'pid'):
            os.killpg(os.getpgid(self.sub_process.pid), signal.SIGTERM)
                for sig in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
                    if hasattr(signal, sig):
                        signal.signal(getattr(signal, sig), signal.SIG_DFL)
                os.setsid()
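A short, hypothetical task definition using the operator above; the `dag` object and task id are illustrative, not taken from this file:

# Illustrative usage; assumes a `dag` defined as in any Airflow DAG file.
run_script = BashOperator(
    task_id='run_script',
    # `set -e;` makes any failing sub-command fail the whole operator,
    # per the recommendation in the docstring above.
    bash_command="set -e; python3 script.py '{{ next_execution_date }}'",
    dag=dag,
)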
virtualmachines.go
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package virtualmachines import ( "context" "encoding/base64" "fmt" "strconv" "strings" "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" "github.com/Azure/go-autorest/autorest/to" "github.com/pkg/errors" corev1 "k8s.io/api/core/v1" infrav1 "sigs.k8s.io/cluster-api-provider-azure/api/v1alpha3" azure "sigs.k8s.io/cluster-api-provider-azure/cloud" "sigs.k8s.io/cluster-api-provider-azure/cloud/converters" "sigs.k8s.io/cluster-api-provider-azure/cloud/services/resourceskus" ) // getExisting provides information about a virtual machine. func (s *Service) getExisting(ctx context.Context, name string) (*infrav1.VM, error) { vm, err := s.Client.Get(ctx, s.Scope.ResourceGroup(), name) if err != nil { return nil, err } convertedVM, err := converters.SDKToVM(vm) if err != nil { return convertedVM, err } // Discover addresses for NICs associated with the VM // and add them to our converted vm struct addresses, err := s.getAddresses(ctx, vm) if err != nil { return convertedVM, err } convertedVM.Addresses = addresses return convertedVM, nil } // Reconcile gets/creates/updates a virtual machine. func (s *Service) Reconcile(ctx context.Context) error { for _, vmSpec := range s.Scope.VMSpecs() { existingVM, err := s.getExisting(ctx, vmSpec.Name) switch { case err != nil && !azure.ResourceNotFound(err): return errors.Wrapf(err, "failed to get VM %s", vmSpec.Name) case err == nil: // VM already exists, update the spec and skip creation. 
s.Scope.SetProviderID(fmt.Sprintf("azure:///%s", existingVM.ID)) s.Scope.SetAnnotation("cluster-api-provider-azure", "true") s.Scope.SetAddresses(existingVM.Addresses) s.Scope.SetVMState(existingVM.State) default: s.Scope.V(2).Info("creating VM", "vm", vmSpec.Name) storageProfile, err := s.generateStorageProfile(ctx, vmSpec) if err != nil { return err } nicRefs := make([]compute.NetworkInterfaceReference, len(vmSpec.NICNames)) for i, nicName := range vmSpec.NICNames { primary := i == 0 nicRefs[i] = compute.NetworkInterfaceReference{ ID: to.StringPtr(azure.NetworkInterfaceID(s.Scope.SubscriptionID(), s.Scope.ResourceGroup(), nicName)), NetworkInterfaceReferenceProperties: &compute.NetworkInterfaceReferenceProperties{ Primary: to.BoolPtr(primary), }, } } priority, evictionPolicy, billingProfile, err := getSpotVMOptions(vmSpec.SpotVMOptions) if err != nil { return errors.Wrapf(err, "failed to get Spot VM options") } sshKey, err := base64.StdEncoding.DecodeString(vmSpec.SSHKeyData) if err != nil { return errors.Wrapf(err, "failed to decode ssh public key") } bootstrapData, err := s.Scope.GetBootstrapData(ctx) if err != nil { return errors.Wrap(err, "failed to retrieve bootstrap data") } virtualMachine := compute.VirtualMachine{ Plan: s.generateImagePlan(), Location: to.StringPtr(s.Scope.Location()), Tags: converters.TagsToMap(infrav1.Build(infrav1.BuildParams{ ClusterName: s.Scope.ClusterName(), Lifecycle: infrav1.ResourceLifecycleOwned, Name: to.StringPtr(vmSpec.Name), Role: to.StringPtr(vmSpec.Role), Additional: s.Scope.AdditionalTags(), })), VirtualMachineProperties: &compute.VirtualMachineProperties{ HardwareProfile: &compute.HardwareProfile{ VMSize: compute.VirtualMachineSizeTypes(vmSpec.Size), }, StorageProfile: storageProfile, OsProfile: &compute.OSProfile{ ComputerName: to.StringPtr(vmSpec.Name), AdminUsername: to.StringPtr(azure.DefaultUserName), CustomData: to.StringPtr(bootstrapData), LinuxConfiguration: &compute.LinuxConfiguration{ DisablePasswordAuthentication: to.BoolPtr(true), SSH: &compute.SSHConfiguration{ PublicKeys: &[]compute.SSHPublicKey{ { Path: to.StringPtr(fmt.Sprintf("/home/%s/.ssh/authorized_keys", azure.DefaultUserName)), KeyData: to.StringPtr(string(sshKey)), }, }, }, }, }, NetworkProfile: &compute.NetworkProfile{ NetworkInterfaces: &nicRefs, }, Priority: priority, EvictionPolicy: evictionPolicy, BillingProfile: billingProfile, DiagnosticsProfile: &compute.DiagnosticsProfile{ BootDiagnostics: &compute.BootDiagnostics{ Enabled: to.BoolPtr(true), }, }, }, } if vmSpec.Zone != "" { zones := []string{vmSpec.Zone} virtualMachine.Zones = &zones } if vmSpec.Identity == infrav1.VMIdentitySystemAssigned { virtualMachine.Identity = &compute.VirtualMachineIdentity{ Type: compute.ResourceIdentityTypeSystemAssigned, } } else if vmSpec.Identity == infrav1.VMIdentityUserAssigned { if len(vmSpec.UserAssignedIdentities) == 0 { return errors.Wrapf(err, "cannot create VM: The user-assigned identity provider ids must not be null or empty for 'UserAssigned' identity type.") } // UserAssignedIdentities - The list of user identities associated with the Virtual Machine. // The user identity dictionary key references will be ARM resource ids in the form: // '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
userIdentitiesMap := make(map[string]*compute.VirtualMachineIdentityUserAssignedIdentitiesValue, len(vmSpec.UserAssignedIdentities)) for _, id := range vmSpec.UserAssignedIdentities { key := id.ProviderID if strings.HasPrefix(id.ProviderID, "azure:///") { key = strings.TrimPrefix(key, "azure:///") } userIdentitiesMap[key] = &compute.VirtualMachineIdentityUserAssignedIdentitiesValue{} } virtualMachine.Identity = &compute.VirtualMachineIdentity{ Type: compute.ResourceIdentityTypeUserAssigned, UserAssignedIdentities: userIdentitiesMap, } } if err := s.Client.CreateOrUpdate(ctx, s.Scope.ResourceGroup(), vmSpec.Name, virtualMachine); err != nil { return errors.Wrapf(err, "failed to create VM %s in resource group %s", vmSpec.Name, s.Scope.ResourceGroup()) } s.Scope.V(2).Info("successfully created VM", "vm", vmSpec.Name) } } return nil } func (s *Service) generateImagePlan() *compute.Plan { image, err := s.Scope.GetVMImage() if err != nil { return nil } if image.Marketplace == nil || image.Marketplace.ThirdPartyImage == false { return nil } if image.Marketplace.Publisher == "" || image.Marketplace.SKU == "" || image.Marketplace.Offer == "" { return nil } return &compute.Plan{ Publisher: to.StringPtr(image.Marketplace.Publisher), Name: to.StringPtr(image.Marketplace.SKU), Product: to.StringPtr(image.Marketplace.Offer), } } // Delete deletes the virtual machine with the provided name. func (s *Service) Delete(ctx context.Context) error { for _, vmSpec := range s.Scope.VMSpecs() { s.Scope.V(2).Info("deleting VM", "vm", vmSpec.Name) err := s.Client.Delete(ctx, s.Scope.ResourceGroup(), vmSpec.Name) if err != nil && azure.ResourceNotFound(err) { // already deleted continue } if err != nil { return errors.Wrapf(err, "failed to delete VM %s in resource group %s", vmSpec.Name, s.Scope.ResourceGroup()) } s.Scope.V(2).Info("successfully deleted VM", "vm", vmSpec.Name) } return nil } func (s *Service) getAddresses(ctx context.Context, vm compute.VirtualMachine) ([]corev1.NodeAddress, error) { addresses := []corev1.NodeAddress{} if vm.NetworkProfile.NetworkInterfaces == nil { return addresses, nil } for _, nicRef := range *vm.NetworkProfile.NetworkInterfaces { // The full ID includes the name at the very end. Split the string and pull the last element // Ex: /subscriptions/$SUB/resourceGroups/$RG/providers/Microsoft.Network/networkInterfaces/$NICNAME // We'll check to see if ID is nil and bail early if we don't have it if nicRef.ID == nil { continue } nicName := getResourceNameByID(to.String(nicRef.ID)) // Fetch nic and append its addresses nic, err := s.InterfacesClient.Get(ctx, s.Scope.ResourceGroup(), nicName) if err != nil { return addresses, err } if nic.IPConfigurations == nil { continue } for _, ipConfig := range *nic.IPConfigurations { if ipConfig.PrivateIPAddress != nil { addresses = append(addresses, corev1.NodeAddress{ Type: corev1.NodeInternalIP, Address: to.String(ipConfig.PrivateIPAddress), }, ) } if ipConfig.PublicIPAddress == nil { continue } // ID is the only field populated in PublicIPAddress sub-resource. // Thus, we have to go fetch the publicIP with the name. 
			publicIPName := getResourceNameByID(to.String(ipConfig.PublicIPAddress.ID))
			publicNodeAddress, err := s.getPublicIPAddress(ctx, publicIPName)
			if err != nil {
				return addresses, err
			}
			addresses = append(addresses, publicNodeAddress)
		}
	}

	return addresses, nil
}

// getPublicIPAddress will fetch a public IP address resource by name and return a NodeAddress representation.
func (s *Service) getPublicIPAddress(ctx context.Context, publicIPAddressName string) (corev1.NodeAddress, error) {
	retAddress := corev1.NodeAddress{}
	publicIP, err := s.PublicIPsClient.Get(ctx, s.Scope.ResourceGroup(), publicIPAddressName)
	if err != nil {
		return retAddress, err
	}
	retAddress.Type = corev1.NodeExternalIP
	retAddress.Address = to.String(publicIP.IPAddress)

	return retAddress, nil
}

// getResourceNameByID takes a resource ID like
// `/subscriptions/$SUB/resourceGroups/$RG/providers/Microsoft.Network/networkInterfaces/$NICNAME`
// and parses out the string after the last slash.
func getResourceNameByID(resourceID string) string {
	explodedResourceID := strings.Split(resourceID, "/")
	resourceName := explodedResourceID[len(explodedResourceID)-1]
	return resourceName
}

// generateStorageProfile generates a pointer to a compute.StorageProfile which can be utilized for VM creation.
func (s *Service) generateStorageProfile(ctx context.Context, vmSpec azure.VMSpec) (*compute.StorageProfile, error) {
	storageProfile := &compute.StorageProfile{
		OsDisk: &compute.OSDisk{
			Name:         to.StringPtr(azure.GenerateOSDiskName(vmSpec.Name)),
			OsType:       compute.OperatingSystemTypes(vmSpec.OSDisk.OSType),
			CreateOption: compute.DiskCreateOptionTypesFromImage,
			DiskSizeGB:   to.Int32Ptr(vmSpec.OSDisk.DiskSizeGB),
			ManagedDisk: &compute.ManagedDiskParameters{
				StorageAccountType: compute.StorageAccountTypes(vmSpec.OSDisk.ManagedDisk.StorageAccountType),
			},
			Caching: compute.CachingTypes(vmSpec.OSDisk.CachingType),
		},
	}

	sku, err := s.ResourceSKUCache.Get(ctx, vmSpec.Size, resourceskus.VirtualMachines)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to find vm sku %s in compute api", vmSpec.Size)
	}

	// Checking if the requested VM size has at least 2 vCPUs
	vCPUCapability, err := sku.HasCapabilityWithCapacity(resourceskus.VCPUs, resourceskus.MinimumVCPUS)
	if err != nil {
		return nil, errors.Wrap(err, "failed to validate the vCPU capability")
	}
	if !vCPUCapability {
		return nil, errors.New("vm size should be bigger or equal to at least 2 vCPUs")
	}

	// Checking if the requested VM size has at least 2 Gi of memory
	MemoryCapability, err := sku.HasCapabilityWithCapacity(resourceskus.MemoryGB, resourceskus.MinimumMemory)
	if err != nil
if !MemoryCapability { return nil, errors.New("vm memory should be bigger or equal to at least 2Gi") } // enable ephemeral OS if vmSpec.OSDisk.DiffDiskSettings != nil { if !sku.HasCapability(resourceskus.EphemeralOSDisk) { return nil, fmt.Errorf("vm size %s does not support ephemeral os. select a different vm size or disable ephemeral os", vmSpec.Size) } storageProfile.OsDisk.DiffDiskSettings = &compute.DiffDiskSettings{ Option: compute.DiffDiskOptions(vmSpec.OSDisk.DiffDiskSettings.Option), } } dataDisks := []compute.DataDisk{} for _, disk := range vmSpec.DataDisks { dataDisks = append(dataDisks, compute.DataDisk{ CreateOption: compute.DiskCreateOptionTypesEmpty, DiskSizeGB: to.Int32Ptr(disk.DiskSizeGB), Lun: disk.Lun, Name: to.StringPtr(azure.GenerateDataDiskName(vmSpec.Name, disk.NameSuffix)), Caching: compute.CachingTypes(disk.CachingType), }) } storageProfile.DataDisks = &dataDisks image, err := s.Scope.GetVMImage() if err != nil { return nil, errors.Wrap(err, "failed to get VM image") } imageRef, err := converters.ImageToSDK(image) if err != nil { return nil, err } storageProfile.ImageReference = imageRef return storageProfile, nil } func getSpotVMOptions(spotVMOptions *infrav1.SpotVMOptions) (compute.VirtualMachinePriorityTypes, compute.VirtualMachineEvictionPolicyTypes, *compute.BillingProfile, error) { // Spot VM not requested, return zero values to apply defaults if spotVMOptions == nil { return "", "", nil, nil } var billingProfile *compute.BillingProfile if spotVMOptions.MaxPrice != nil { maxPrice, err := strconv.ParseFloat(*spotVMOptions.MaxPrice, 64) if err != nil { return "", "", nil, err } billingProfile = &compute.BillingProfile{ MaxPrice: &maxPrice, } } return compute.Spot, compute.Deallocate, billingProfile, nil }
{
		return nil, errors.Wrap(err, "failed to validate the memory capability")
	}
test_window.rs
extern crate glium;
extern crate imgui;
extern crate imgui_glium_renderer;
extern crate imgui_glutin_support;

mod support;

const CLEAR_COLOR: [f32; 4] = [0.2, 0.2, 0.2, 1.0];

fn main()
{
    support::run("test_window.rs".to_owned(), CLEAR_COLOR, |ui| {
        let mut open = true;
        ui.show_demo_window(&mut open);
        open
    });
}
219_92b42c76490df6167edf.js
/*! Copyright Pyplan 2020. All rights reserved. */ (window.webpackJsonp=window.webpackJsonp||[]).push([[219],{1054:function(n,e){function t(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1054},1237:function(n,e){function t(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1237},1243:function(n,e){function t(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1243},1244:function(n,e){function t(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1244},1262:function(n,e){function t(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1262},1264:function(n,e){function
(n){var e=new Error("Cannot find module '"+n+"'");throw e.code="MODULE_NOT_FOUND",e}t.keys=function(){return[]},t.resolve=t,n.exports=t,t.id=1264},1855:function(n,e,t){var o=t(690);function i(n){return n&&(n.__esModule?n.default:n)}n.exports=(o.default||o).template({compiler:[8,">= 4.3.0"],main:function(n,e,o,a,s){var c=null!=e?e:n.nullContext||{},d=n.escapeExpression;return'<div class="row withBottomMargin">\n <div class="col-sm-12">\n <button class="btn btn-orange btn--icon btnStartDebug"><i class="fa fa-bug"></i>'+d(i(t(688)).call(c,"start_debug",{name:"L",hash:{},data:s,loc:{start:{line:3,column:84},end:{line:3,column:103}}}))+'</button>\n\n <input type="radio" class=\'icheck-me checked\' id="reset_model_option_node" name="reset_model_option"\n data-skin="square" data-color="blue" checked="checked">\n <label class=\'inline\' for="reset_model_option_node">'+d(i(t(688)).call(c,"reset_this_node",{name:"L",hash:{},data:s,loc:{start:{line:7,column:56},end:{line:7,column:79}}}))+'</label>\n\n <input type="radio" class=\'icheck-me\' id="reset_model_option_model" name="reset_model_option" data-skin="square"\n data-color="blue">\n <label class=\'inline\' for="reset_model_option_model">'+d(i(t(688)).call(c,"reset_entire_model",{name:"L",hash:{},data:s,loc:{start:{line:11,column:57},end:{line:11,column:83}}}))+'</label>\n </div>\n\n</div>\n\n<div class="row">\n <div class="col-sm-6">\n <pre class="debugLog"></pre>\n </div>\n <div class="col-sm-6">\n\n <div class="row">\n\n <div class="col-sm-6">\n <div class="box">\n <div class="box-title">\n <h3>\n <i class="fa fa-clock"></i>\n '+d(i(t(688)).call(c,"acc_calc_time",{name:"L",hash:{},data:s,loc:{start:{line:29,column:14},end:{line:29,column:35}}}))+'\n </h3>\n </div>\n <div class="box-content">\n <div class="row text-center">\n <h2 class="name cumTime">-</h2>\n </div>\n </div>\n </div>\n </div>\n\n <div class="col-sm-6">\n <div class="box">\n <div class="box-title">\n <h3>\n <i class="fa fa-bars"></i>\n '+d(i(t(688)).call(c,"container_memory",{name:"L",hash:{},data:s,loc:{start:{line:45,column:14},end:{line:45,column:38}}}))+'\n </h3>\n </div>\n <div class="box-content">\n <div class="row text-center">\n <h2 class="name cumMem">-</h2>\n </div>\n </div>\n </div>\n </div>\n </div>\n\n <div class="row">\n\n <div class="col-sm-12 progressChartContainer nodisplay ">\n <div class="box">\n <div class="box-title">\n <h3>\n <i class="fa fa-chart-line"></i>\n '+d(i(t(688)).call(c,"memory_progress",{name:"L",hash:{},data:s,loc:{start:{line:64,column:14},end:{line:64,column:37}}}))+'\n </h3>\n </div>\n <div class="box-content">\n <div class="row">\n <div class="progress_chart">\n </div>\n </div>\n </div>\n </div>\n </div>\n </div>\n\n </div>\n</div>'},useData:!0})},958:function(n,e,t){"use strict";(function(o,i){var a,s,c=t(18);t(1183);a=[t(219),t(683),t(1855)],void 0===(s=function(n,e,t){return o.View.extend({el:i("#main"),cumTime:0,categories:[],points:[],render:function(){var n=this,o=t(this.options);(new e).show({title:"".concat((0,c.translate)("debug_node"),": ").concat(this.options.nodeTitle," (").concat(this.options.nodeId,")"),html:o,modalClass:"debugNodeView",onLoad:function(){var 
e=i("#main-modal"),t=e.find(".debugLog"),o=parseInt(.9*i(window).height()-200).toString();t.css("height","".concat(o,"px")),e.on("debug_info",function(o,i){n.updateDebugInfo(e,t,i)}),e.find(".btnStartDebug").on("click",function(){n.startDebug(e,t)}),(0,c.postRender)(e)},onClose:function(){n.points=[],n.categories=[]}})},startDebug:function(e,t){e.find(".btnStartDebug").attr("disabled",!0);var o=new n;t.text(""),e.find(".cumMem").text("-"),e.find(".cumTime").text("-"),e.find(".progressChartContainer").hide(),this.points=[],this.categories=[],this.cumTime=0,o.previewNode(this.options.nodeId,function(n){e.find(".btnStartDebug").attr("disabled",!1)},function(){e.find(".btnStartDebug").attr("disabled",!1)},e.find("#reset_model_option_node").is(":checked")?"node":"model")},updateDebugInfo:function(n,e,t){if("endPreview"==t.action)i("<span></span>").text("---------------- end ----------------").appendTo(e),e.scrollTop(e.prop("scrollHeight")),this.createChart(n);else{var o="<code ".concat("endCalc"==t.action?"title='Time:".concat(t.time,"'"):""," class='").concat(t.action,"'>").concat(t.action,"</code> ").concat(t.fromDynamic?'<code class="dynamic">dynamic</code>':""," <span>").concat(t.title," (").concat(t.node,")</span>");if(i("".concat(o,"<br>")).appendTo(e),e.scrollTop(e.prop("scrollHeight")),"endCalc"==t.action){this.cumTime+=parseFloat(t.time);var a=t.totalMemory>0?parseFloat(t.usedMemory/t.totalMemory*100):0;n.find(".cumMem").text("".concat(t.usedMemory.toFixed(2)," Gb - ").concat(a.toFixed(0),"%")),n.find(".cumTime").text("".concat(this.cumTime.toFixed(2)," sec.")),this.categories.push(t.node),this.points.push(t.usedMemory)}}},createChart:function(n){n.find(".progressChartContainer").show(),this.$el.find(".progress_chart").highcharts({chart:{type:"area",zoomType:"xy",height:n.find(".debugLog").height()-n.find(".cumTime").closest(".box").height()-60,marginTop:10},title:!1,xAxis:{categories:this.categories,labels:{enabled:!1}},yAxis:{title:{text:"Memory (gb)"}},legend:!1,exporting:{enabled:!1},credits:!1,series:[{name:"Memory (gb)",data:this.points,tooltip:{valueDecimals:2,valueSuffix:" gb"}}]})}})}.apply(e,a))||(n.exports=s)}).call(this,t(218),t(1))}}]);
t
arlo_arm.py
from Arlo import Arlo

import config

USERNAME = config.key("ARLO_USERNAME")
PASSWORD = config.key("ARLO_PASSWORD")
    arlo.Arm(basestations[0])
except Exception as e:
    print(e)
try:
    arlo = Arlo(USERNAME, PASSWORD)
    basestations = arlo.GetDevices('basestation')
generator.rs
use std::sync::Arc;

use rand::Rng;

use crate::{physics::{materials::{lambertian::Lambertian, dielectric::Dielectric, metal::Metal}, shapes::sphere::Sphere}, maths::vec3::Vec3, render::color::Color};

use super::scene::Scene;

pub fn
() -> Scene {
    let mut scene = Scene::new();
    let mut rng = rand::thread_rng();

    let ground_mat = Arc::new(Lambertian::new(Color::new(0.5, 0.5, 0.5)));
    let ground_sphere = Sphere::new(0.0, -1000.0, 0.0, 1000.0, ground_mat, Vec3::zero());
    scene.add_shape(Arc::new(ground_sphere));

    let x = 11;
    for a in -x..=x {
        for b in -x..=x {
            let mat: f64 = rng.gen::<f64>();
            let center = Vec3::new(
                (a as f64) + rng.gen_range(0.0..0.9),
                0.2,
                (b as f64) + rng.gen_range(0.0..0.9),
            );

            if mat < 0.7 {
                // Diffuse
                let albedo = Color::random(0.0..1.0) * Color::random(0.0..1.0);
                let sphere_mat = Arc::new(Lambertian::new(albedo));
                let sphere = Sphere::new(center.x, center.y, center.z, 0.2, sphere_mat, Vec3::new(0.0, 0.1, 0.0));
                scene.add_shape(Arc::new(sphere));
            } else if mat < 0.9 {
                // Metal
                let albedo = Color::random(0.4..1.0);
                let fuzz = rng.gen_range(0.0..0.5);
                let sphere_mat = Arc::new(Metal::new(albedo, fuzz));
                let sphere = Sphere::new(center.x, center.y, center.z, 0.2, sphere_mat, Vec3::zero());
                scene.add_shape(Arc::new(sphere));
            } else {
                // Glass
                let sphere_mat = Arc::new(Dielectric::new(1.5, 0.8));
                let sphere = Sphere::new(center.x, center.y, center.z, 0.2, sphere_mat, Vec3::zero());
                scene.add_shape(Arc::new(sphere));
            }
        }
    }

    let mat1 = Arc::new(Dielectric::new(1.5, 0.8));
    let mat2 = Arc::new(Lambertian::new(Color::new(0.4, 0.2, 0.1)));
    let mat3 = Arc::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0));

    let sphere1 = Sphere::new(0.0, 1.0, 0.0, 1.0, mat1, Vec3::zero());
    let sphere2 = Sphere::new(-4.0, 1.0, 0.0, 1.0, mat2, Vec3::zero());
    let sphere3 = Sphere::new(4.0, 1.0, 0.0, 1.0, mat3, Vec3::zero());

    scene.add_shape(Arc::new(sphere1));
    scene.add_shape(Arc::new(sphere2));
    scene.add_shape(Arc::new(sphere3));

    return scene;
}
make_random_balls_scene
keypairs.py
# -*- coding: utf-8 -*-
# Copyright 2013-2017 Ent. Services Development Corporation LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
Pyramid views for Eucalyptus and AWS key pairs

"""
import simplejson as json

from boto.exception import BotoServerError

from pyramid.httpexceptions import HTTPFound
from pyramid.view import view_config
from pyramid.response import Response

from ..forms.keypairs import KeyPairForm, KeyPairImportForm, KeyPairDeleteForm
from ..i18n import _
from ..models import Notification
from ..views import BaseView, LandingPageView, JSONResponse
from .
import boto_error_handler


class KeyPairsView(LandingPageView):
    def __init__(self, request):
        super(KeyPairsView, self).__init__(request)
        self.title_parts = [_(u'Key Pairs')]
        self.initial_sort_key = 'name'
        self.prefix = '/keypairs'
        self.delete_form = KeyPairDeleteForm(self.request, formdata=self.request.params or None)
        self.enable_smart_table = True

    @view_config(route_name='keypairs', renderer='../templates/keypairs/keypairs.pt')
    def keypairs_landing(self):
        json_items_endpoint = self.request.route_path('keypairs_json')
        # filter_keys are passed to client-side filtering in search box
        self.filter_keys = ['name', 'fingerprint']
        # sort_keys are passed to sorting drop-down
        self.sort_keys = [
            dict(key='name', name=_(u'Name: A to Z')),
            dict(key='-name', name=_(u'Name: Z to A')),
        ]

        return dict(
            filter_keys=self.filter_keys,
            search_facets=[],
            sort_keys=self.sort_keys,
            prefix=self.prefix,
            initial_sort_key=self.initial_sort_key,
            json_items_endpoint=json_items_endpoint,
            delete_form=self.delete_form,
        )


class KeyPairsJsonView(BaseView):
    def __init__(self, request):
        super(KeyPairsJsonView, self).__init__(request)
        self.conn = self.get_connection()

    @view_config(route_name='keypairs_json', renderer='json', request_method='POST')
    def keypairs_json(self):
        if not self.is_csrf_valid():
            return JSONResponse(status=400, message="missing CSRF token")
        keypairs = []
        with boto_error_handler(self.request):
            for keypair in self.get_items():
                keypairs.append(dict(
                    name=keypair.name,
                    fingerprint=keypair.fingerprint,
                ))
            return dict(results=keypairs)

    def get_items(self):
        ret = []
        if self.conn:
            ret = self.conn.get_all_key_pairs()
        return ret


class KeyPairView(BaseView):
    """Views for single Key Pair"""
    TEMPLATE = '../templates/keypairs/keypair_view.pt'

    def __init__(self, request):
        super(KeyPairView, self).__init__(request)
        keyname = '/'.join(self.request.subpath)
        if keyname == 'new':
            keyname = _(u'Create')
        if keyname == 'new2':
            keyname = _(u'Import')
        self.title_parts = [_(u'Key Pair'), keyname]
        self.conn = self.get_connection()
        self.keypair = self.get_keypair()
        self.keypair_route_id = '/'.join(self.request.subpath)
        self.keypair_form = KeyPairForm(self.request, keypair=self.keypair, formdata=self.request.params or None)
        self.keypair_import_form = KeyPairImportForm(
            self.request, keypair=self.keypair, formdata=self.request.params or None)
        self.delete_form = KeyPairDeleteForm(self.request, formdata=self.request.params or None)
        # Detect if session has new keypair material
        self.new_keypair_created = True if self._has_file_() else False
        self.created_msg = _(u'Successfully created key pair {keypair}'.format(keypair=self.keypair_route_id))
        controller_options_json = BaseView.escape_json(json.dumps({
            'route_id': self.keypair_route_id,
            'keypair_created': self.new_keypair_created,
            'keypair_created_msg': self.created_msg,
        }))
        self.render_dict = dict(
            keypair=self.keypair,
            keypair_name=self.escape_braces(self.keypair.name) if self.keypair else '',
            keypair_route_id=self.keypair_route_id,
            keypair_form=self.keypair_form,
            keypair_import_form=self.keypair_import_form,
            keypair_created=self.new_keypair_created,
            delete_form=self.delete_form,
            keypair_names=self.get_keypair_names(),
            controller_options_json=controller_options_json,
        )

    def get_keypair(self):
        keypair_param = '/'.join(self.request.subpath)
        if keypair_param == "new" or keypair_param == "new2":
            return None
        keypairs_param = [keypair_param]
        keypairs = []
        if self.conn:
            try:
                keypairs = self.conn.get_all_key_pairs(keynames=keypairs_param)
            except BotoServerError:
                return None
        keypair = keypairs[0] if
keypairs else None return keypair @view_config(route_name='keypair_view', renderer=TEMPLATE) def keypair_view(self): return self.render_dict def
(self): keypairs = [] with boto_error_handler(self.request): if self.conn: keypairs = [k.name for k in self.conn.get_all_key_pairs()] return sorted(set(keypairs)) @view_config(route_name='keypair_create', request_method='POST', renderer=TEMPLATE) def keypair_create(self): if self.keypair_form.validate(): name = self.request.params.get('name') location = self.request.route_path('keypair_view', subpath=name) with boto_error_handler(self.request, location): self.log_request(_(u"Creating keypair ") + name) new_keypair = self.conn.create_key_pair(name) # Store the new keypair material information in the session self._store_file_(new_keypair.name + ".pem", 'application/x-pem-file;charset=ISO-8859-1', new_keypair.material) msg_template = _(u'Successfully created key pair {keypair}') msg = msg_template.format(keypair=name) if self.request.is_xhr: resp_body = json.dumps(dict(message=msg)) return Response(status=200, body=resp_body, content_type='application/x-pem-file;charset=ISO-8859-1') else: location = self.request.route_path('keypair_view', subpath=name) return HTTPFound(location=location) if self.request.is_xhr: form_errors = ', '.join(self.keypair_form.get_errors_list()) return JSONResponse(status=400, message=form_errors) # Validation failure = bad request else: self.request.error_messages = self.keypair_form.get_errors_list() return self.render_dict @view_config(route_name='keypair_import', request_method='POST', renderer=TEMPLATE) def keypair_import(self): if self.keypair_form.validate(): name = self.request.params.get('name') key_material = self.request.params.get('key_material') # Return to import form if failure failure_location = self.request.route_path('keypair_view', subpath='new2') success_location = self.request.route_path('keypair_view', subpath=name) with boto_error_handler(self.request, failure_location): self.log_request(_(u"Importing keypair ") + name) self.conn.import_key_pair(name, key_material) msg_template = _(u'Successfully imported key pair {keypair}') msg = msg_template.format(keypair=name) self.request.session.flash(msg, queue=Notification.SUCCESS) return HTTPFound(location=success_location) return self.render_dict @view_config(route_name='keypair_delete', request_method='POST', renderer=TEMPLATE) def keypair_delete(self): if self.delete_form.validate(): keypair_name_param = self.request.params.get('name') keypair_names = [keypair.strip() for keypair in keypair_name_param.split(',')] location = self.request.route_path('keypairs') with boto_error_handler(self.request, location): for keypair_name in keypair_names: self.log_request(_(u"Deleting keypair ") + keypair_name) self.conn.delete_key_pair(keypair_name) prefix = _(u'Successfully deleted keypair') if len(keypair_names) == 1: msg = prefix else: msg = u'{0} {1}'.format(prefix, ', '.join(keypair_names)) self.request.session.flash(msg, queue=Notification.SUCCESS) return HTTPFound(location=location) return self.render_dict
get_keypair_names
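For context, the views above wrap a handful of boto (2.x) EC2 key pair calls. A minimal standalone sketch of those calls; the region name and credentials are assumptions, not part of this file:

import boto.ec2

# Assumes AWS/Eucalyptus credentials are available in the environment.
conn = boto.ec2.connect_to_region('us-east-1')

keypair = conn.create_key_pair('example-key')   # KeyPair with .name, .material
print(keypair.name, keypair.fingerprint)

for kp in conn.get_all_key_pairs():             # listing, as in KeyPairsJsonView
    print(kp.name, kp.fingerprint)

conn.delete_key_pair('example-key')             # as in keypair_delete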
ModalDropdown.js
/**
 * Created by sohobloo on 16/9/13.
 * Modified by vitorizkiimanda on 19/01/10.
 */

'use strict';

// https://github.com/sohobloo/react-native-modal-dropdown

import React, { Component } from 'react';

import {
  StyleSheet,
  Dimensions,
  View,
  Text,
  TouchableWithoutFeedback,
  TouchableNativeFeedback,
  TouchableOpacity,
  TouchableHighlight,
  Modal,
  ActivityIndicator,
  ListView,
} from 'react-native';
// import ListView from 'deprecated-react-native-listview';

import PropTypes from 'prop-types';

const TOUCHABLE_ELEMENTS = [
  'TouchableHighlight',
  'TouchableOpacity',
  'TouchableWithoutFeedback',
  'TouchableNativeFeedback',
];

export default class
extends Component { static propTypes = { disabled: PropTypes.bool, scrollEnabled: PropTypes.bool, defaultIndex: PropTypes.number, defaultValue: PropTypes.string, options: PropTypes.array, accessible: PropTypes.bool, animated: PropTypes.bool, showsVerticalScrollIndicator: PropTypes.bool, keyboardShouldPersistTaps: PropTypes.string, style: PropTypes.oneOfType([PropTypes.number, PropTypes.object, PropTypes.array]), textStyle: PropTypes.oneOfType([PropTypes.number, PropTypes.object, PropTypes.array]), dropdownStyle: PropTypes.oneOfType([PropTypes.number, PropTypes.object, PropTypes.array]), dropdownTextStyle: PropTypes.oneOfType([PropTypes.number, PropTypes.object, PropTypes.array]), dropdownTextHighlightStyle: PropTypes.oneOfType([ PropTypes.number, PropTypes.object, PropTypes.array, ]), adjustFrame: PropTypes.func, renderRow: PropTypes.func, renderSeparator: PropTypes.func, renderButtonText: PropTypes.func, onDropdownWillShow: PropTypes.func, onDropdownWillHide: PropTypes.func, onSelect: PropTypes.func, }; static defaultProps = { disabled: false, scrollEnabled: true, defaultIndex: -1, defaultValue: 'Please select...', options: null, animated: true, showsVerticalScrollIndicator: true, keyboardShouldPersistTaps: 'never', }; constructor(props) { super(props); this._button = null; this._buttonFrame = null; this._nextValue = null; this._nextIndex = null; this.state = { accessible: !!props.accessible, loading: !props.options, showDropdown: false, buttonText: props.defaultValue, selectedIndex: props.defaultIndex, }; } componentWillReceiveProps(nextProps) { let { buttonText, selectedIndex } = this.state; const { defaultIndex, defaultValue, options } = nextProps; buttonText = this._nextValue == null ? buttonText : this._nextValue; selectedIndex = this._nextIndex == null ? selectedIndex : this._nextIndex; if (selectedIndex < 0) { selectedIndex = defaultIndex; if (selectedIndex < 0) { buttonText = defaultValue; } } this._nextValue = null; this._nextIndex = null; this.setState({ loading: !options, buttonText, selectedIndex, }); } render() { return ( <View {...this.props}> {this._renderButton()} {this._renderModal()} </View> ); } _updatePosition(callback) { if (this._button && this._button.measure) { this._button.measure((fx, fy, width, height, px, py) => { this._buttonFrame = { x: px, y: py, w: width, h: height }; callback && callback(); }); } } show() { this._updatePosition(() => { this.setState({ showDropdown: true, }); }); } hide() { this.setState({ showDropdown: false, }); } select(idx) { const { defaultValue, options, defaultIndex, renderButtonText } = this.props; let value = defaultValue; if (idx == null || !options || idx >= options.length) { idx = defaultIndex; } if (idx >= 0) { value = renderButtonText ? 
renderButtonText(options[idx]) : options[idx].toString(); } this._nextValue = value; this._nextIndex = idx; this.setState({ buttonText: value, selectedIndex: idx, }); } _renderButton() { const { disabled, accessible, children, textStyle } = this.props; const { buttonText } = this.state; return ( <TouchableOpacity ref={button => (this._button = button)} disabled={disabled} accessible={accessible} onPress={this._onButtonPress} > {children || ( <View style={styles.button}> <Text style={[styles.buttonText, textStyle]} numberOfLines={1}> {buttonText} </Text> </View> )} </TouchableOpacity> ); } _onButtonPress = () => { const { onDropdownWillShow } = this.props; if (!onDropdownWillShow || onDropdownWillShow() !== false) { this.show(); } }; _renderModal() { const { animated, accessible, dropdownStyle } = this.props; const { showDropdown, loading } = this.state; if (showDropdown && this._buttonFrame) { const frameStyle = this._calcPosition(); const animationType = animated ? 'fade' : 'none'; return ( <Modal animationType={animationType} visible transparent onRequestClose={this._onRequestClose} supportedOrientations={[ 'portrait', 'portrait-upside-down', 'landscape', 'landscape-left', 'landscape-right', ]} > <TouchableWithoutFeedback accessible={accessible} disabled={!showDropdown} onPress={this._onModalPress} > <View style={styles.modal}> <View style={[styles.dropdown, dropdownStyle, frameStyle]}> {loading ? this._renderLoading() : this._renderDropdown()} </View> </View> </TouchableWithoutFeedback> </Modal> ); } } _calcPosition() { const { dropdownStyle, style, adjustFrame } = this.props; const dimensions = Dimensions.get('window'); const windowWidth = dimensions.width; const windowHeight = dimensions.height; const dropdownHeight = (dropdownStyle && StyleSheet.flatten(dropdownStyle).height) || StyleSheet.flatten(styles.dropdown).height; const bottomSpace = windowHeight - this._buttonFrame.y - this._buttonFrame.h; const rightSpace = windowWidth - this._buttonFrame.x; const showInBottom = bottomSpace >= dropdownHeight || bottomSpace >= this._buttonFrame.y; const showInLeft = rightSpace >= this._buttonFrame.x; const positionStyle = { height: dropdownHeight, top: showInBottom ? this._buttonFrame.y + this._buttonFrame.h : Math.max(0, this._buttonFrame.y - dropdownHeight), }; if (showInLeft) { positionStyle.left = this._buttonFrame.x; } else { const dropdownWidth = (dropdownStyle && StyleSheet.flatten(dropdownStyle).width) || (style && StyleSheet.flatten(style).width) || -1; if (dropdownWidth !== -1) { positionStyle.width = dropdownWidth; } positionStyle.right = rightSpace - this._buttonFrame.w; } return adjustFrame ? 
adjustFrame(positionStyle) : positionStyle; } _onRequestClose = () => { const { onDropdownWillHide } = this.props; if (!onDropdownWillHide || onDropdownWillHide() !== false) { this.hide(); } }; _onModalPress = () => { const { onDropdownWillHide } = this.props; if (!onDropdownWillHide || onDropdownWillHide() !== false) { this.hide(); } }; _renderLoading() { return <ActivityIndicator size="small" />; } _renderDropdown() { const { scrollEnabled, renderSeparator, showsVerticalScrollIndicator, keyboardShouldPersistTaps, } = this.props; return ( <ListView scrollEnabled={scrollEnabled} style={styles.list} dataSource={this._dataSource} renderRow={this._renderRow} renderSeparator={renderSeparator && this._renderSeparator} automaticallyAdjustContentInsets={false} showsVerticalScrollIndicator={showsVerticalScrollIndicator} keyboardShouldPersistTaps={keyboardShouldPersistTaps} /> ); } get _dataSource() { const { options } = this.props; const ds = new ListView.DataSource({ rowHasChanged: (r1, r2) => r1 !== r2, }); return ds.cloneWithRows(options); } _renderRow = (rowData, sectionID, rowID, highlightRow) => { const { renderRow, dropdownTextStyle, dropdownTextHighlightStyle, accessible } = this.props; const { selectedIndex } = this.state; const key = `row_${rowID}`; const highlighted = rowID == selectedIndex; const row = !renderRow ? ( <Text style={[ styles.rowText, dropdownTextStyle, highlighted && styles.highlightedRowText, highlighted && dropdownTextHighlightStyle, ]} > {rowData} </Text> ) : ( renderRow(rowData, rowID, highlighted) ); const preservedProps = { key, accessible, onPress: () => this._onRowPress(rowData, sectionID, rowID, highlightRow), }; if (TOUCHABLE_ELEMENTS.find(name => name == row.type.displayName)) { const props = { ...row.props }; props.key = preservedProps.key; props.onPress = preservedProps.onPress; const { children } = row.props; switch (row.type.displayName) { case 'TouchableHighlight': { return <TouchableHighlight {...props}>{children}</TouchableHighlight>; } case 'TouchableOpacity': { return <TouchableOpacity {...props}>{children}</TouchableOpacity>; } case 'TouchableWithoutFeedback': { return <TouchableWithoutFeedback {...props}>{children}</TouchableWithoutFeedback>; } case 'TouchableNativeFeedback': { return <TouchableNativeFeedback {...props}>{children}</TouchableNativeFeedback>; } default: break; } } return <TouchableHighlight {...preservedProps}>{row}</TouchableHighlight>; }; _onRowPress(rowData, sectionID, rowID, highlightRow) { const { onSelect, renderButtonText, onDropdownWillHide } = this.props; if (!onSelect || onSelect(rowID, rowData) !== false) { highlightRow(sectionID, rowID); const value = (renderButtonText && renderButtonText(rowData)) || rowData.toString(); this._nextValue = value; this._nextIndex = rowID; this.setState({ buttonText: value, selectedIndex: rowID, }); } if (!onDropdownWillHide || onDropdownWillHide() !== false) { this.setState({ showDropdown: false, }); } } _renderSeparator = (sectionID, rowID, adjacentRowHighlighted) => { const key = `spr_${rowID}`; return <View style={styles.separator} key={key} />; }; } const styles = StyleSheet.create({ button: { justifyContent: 'center', }, buttonText: { fontSize: 12, }, modal: { flexGrow: 1, }, dropdown: { position: 'absolute', borderWidth: StyleSheet.hairlineWidth, borderColor: 'lightgray', borderRadius: 2, backgroundColor: 'white', justifyContent: 'center', }, loading: { alignSelf: 'center', }, list: { // flexGrow: 1, }, rowText: { paddingHorizontal: 6, paddingVertical: 10, fontSize: 11, color: 'gray', 
backgroundColor: 'white', textAlignVertical: 'center', }, highlightedRowText: { color: 'black', }, separator: { height: StyleSheet.hairlineWidth, backgroundColor: 'lightgray', }, });
ModalDropdown
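A minimal usage sketch of the component above; the option values and handlers are illustrative, not part of the file:

import React from 'react';
import ModalDropdown from './ModalDropdown';

const Demo = () => (
  <ModalDropdown
    options={['Option 1', 'Option 2', 'Option 3']}
    defaultValue="Please select..."
    onSelect={(rowID, rowData) => console.log(rowID, rowData)}
    onDropdownWillShow={() => true} // return false to veto opening the dropdown
  />
);

export default Demo;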
model_v202101beta1_test_monitoring_settings.go
/* * Synthetics Monitoring API * * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) * * API version: 202101beta1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. package synthetics import ( "encoding/json" ) // V202101beta1TestMonitoringSettings struct for V202101beta1TestMonitoringSettings type V202101beta1TestMonitoringSettings struct { ActivationGracePeriod *string `json:"activationGracePeriod,omitempty"` ActivationTimeUnit *string `json:"activationTimeUnit,omitempty"` ActivationTimeWindow *string `json:"activationTimeWindow,omitempty"` ActivationTimes *string `json:"activationTimes,omitempty"` NotificationChannels *[]string `json:"notificationChannels,omitempty"` } // NewV202101beta1TestMonitoringSettings instantiates a new V202101beta1TestMonitoringSettings object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewV202101beta1TestMonitoringSettings() *V202101beta1TestMonitoringSettings
// NewV202101beta1TestMonitoringSettingsWithDefaults instantiates a new V202101beta1TestMonitoringSettings object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewV202101beta1TestMonitoringSettingsWithDefaults() *V202101beta1TestMonitoringSettings { this := V202101beta1TestMonitoringSettings{} return &this } // GetActivationGracePeriod returns the ActivationGracePeriod field value if set, zero value otherwise. func (o *V202101beta1TestMonitoringSettings) GetActivationGracePeriod() string { if o == nil || o.ActivationGracePeriod == nil { var ret string return ret } return *o.ActivationGracePeriod } // GetActivationGracePeriodOk returns a tuple with the ActivationGracePeriod field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *V202101beta1TestMonitoringSettings) GetActivationGracePeriodOk() (*string, bool) { if o == nil || o.ActivationGracePeriod == nil { return nil, false } return o.ActivationGracePeriod, true } // HasActivationGracePeriod returns a boolean if a field has been set. func (o *V202101beta1TestMonitoringSettings) HasActivationGracePeriod() bool { if o != nil && o.ActivationGracePeriod != nil { return true } return false } // SetActivationGracePeriod gets a reference to the given string and assigns it to the ActivationGracePeriod field. func (o *V202101beta1TestMonitoringSettings) SetActivationGracePeriod(v string) { o.ActivationGracePeriod = &v } // GetActivationTimeUnit returns the ActivationTimeUnit field value if set, zero value otherwise. func (o *V202101beta1TestMonitoringSettings) GetActivationTimeUnit() string { if o == nil || o.ActivationTimeUnit == nil { var ret string return ret } return *o.ActivationTimeUnit } // GetActivationTimeUnitOk returns a tuple with the ActivationTimeUnit field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *V202101beta1TestMonitoringSettings) GetActivationTimeUnitOk() (*string, bool) { if o == nil || o.ActivationTimeUnit == nil { return nil, false } return o.ActivationTimeUnit, true } // HasActivationTimeUnit returns a boolean if a field has been set. func (o *V202101beta1TestMonitoringSettings) HasActivationTimeUnit() bool { if o != nil && o.ActivationTimeUnit != nil { return true } return false } // SetActivationTimeUnit gets a reference to the given string and assigns it to the ActivationTimeUnit field. func (o *V202101beta1TestMonitoringSettings) SetActivationTimeUnit(v string) { o.ActivationTimeUnit = &v } // GetActivationTimeWindow returns the ActivationTimeWindow field value if set, zero value otherwise. func (o *V202101beta1TestMonitoringSettings) GetActivationTimeWindow() string { if o == nil || o.ActivationTimeWindow == nil { var ret string return ret } return *o.ActivationTimeWindow } // GetActivationTimeWindowOk returns a tuple with the ActivationTimeWindow field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *V202101beta1TestMonitoringSettings) GetActivationTimeWindowOk() (*string, bool) { if o == nil || o.ActivationTimeWindow == nil { return nil, false } return o.ActivationTimeWindow, true } // HasActivationTimeWindow returns a boolean if a field has been set. 
func (o *V202101beta1TestMonitoringSettings) HasActivationTimeWindow() bool { if o != nil && o.ActivationTimeWindow != nil { return true } return false } // SetActivationTimeWindow gets a reference to the given string and assigns it to the ActivationTimeWindow field. func (o *V202101beta1TestMonitoringSettings) SetActivationTimeWindow(v string) { o.ActivationTimeWindow = &v } // GetActivationTimes returns the ActivationTimes field value if set, zero value otherwise. func (o *V202101beta1TestMonitoringSettings) GetActivationTimes() string { if o == nil || o.ActivationTimes == nil { var ret string return ret } return *o.ActivationTimes } // GetActivationTimesOk returns a tuple with the ActivationTimes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *V202101beta1TestMonitoringSettings) GetActivationTimesOk() (*string, bool) { if o == nil || o.ActivationTimes == nil { return nil, false } return o.ActivationTimes, true } // HasActivationTimes returns a boolean if a field has been set. func (o *V202101beta1TestMonitoringSettings) HasActivationTimes() bool { if o != nil && o.ActivationTimes != nil { return true } return false } // SetActivationTimes gets a reference to the given string and assigns it to the ActivationTimes field. func (o *V202101beta1TestMonitoringSettings) SetActivationTimes(v string) { o.ActivationTimes = &v } // GetNotificationChannels returns the NotificationChannels field value if set, zero value otherwise. func (o *V202101beta1TestMonitoringSettings) GetNotificationChannels() []string { if o == nil || o.NotificationChannels == nil { var ret []string return ret } return *o.NotificationChannels } // GetNotificationChannelsOk returns a tuple with the NotificationChannels field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *V202101beta1TestMonitoringSettings) GetNotificationChannelsOk() (*[]string, bool) { if o == nil || o.NotificationChannels == nil { return nil, false } return o.NotificationChannels, true } // HasNotificationChannels returns a boolean if a field has been set. func (o *V202101beta1TestMonitoringSettings) HasNotificationChannels() bool { if o != nil && o.NotificationChannels != nil { return true } return false } // SetNotificationChannels gets a reference to the given []string and assigns it to the NotificationChannels field. 
func (o *V202101beta1TestMonitoringSettings) SetNotificationChannels(v []string) { o.NotificationChannels = &v } func (o V202101beta1TestMonitoringSettings) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.ActivationGracePeriod != nil { toSerialize["activationGracePeriod"] = o.ActivationGracePeriod } if o.ActivationTimeUnit != nil { toSerialize["activationTimeUnit"] = o.ActivationTimeUnit } if o.ActivationTimeWindow != nil { toSerialize["activationTimeWindow"] = o.ActivationTimeWindow } if o.ActivationTimes != nil { toSerialize["activationTimes"] = o.ActivationTimes } if o.NotificationChannels != nil { toSerialize["notificationChannels"] = o.NotificationChannels } return json.Marshal(toSerialize) } type NullableV202101beta1TestMonitoringSettings struct { value *V202101beta1TestMonitoringSettings isSet bool } func (v NullableV202101beta1TestMonitoringSettings) Get() *V202101beta1TestMonitoringSettings { return v.value } func (v *NullableV202101beta1TestMonitoringSettings) Set(val *V202101beta1TestMonitoringSettings) { v.value = val v.isSet = true } func (v NullableV202101beta1TestMonitoringSettings) IsSet() bool { return v.isSet } func (v *NullableV202101beta1TestMonitoringSettings) Unset() { v.value = nil v.isSet = false } func NewNullableV202101beta1TestMonitoringSettings(val *V202101beta1TestMonitoringSettings) *NullableV202101beta1TestMonitoringSettings { return &NullableV202101beta1TestMonitoringSettings{value: val, isSet: true} } func (v NullableV202101beta1TestMonitoringSettings) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableV202101beta1TestMonitoringSettings) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
{ this := V202101beta1TestMonitoringSettings{} return &this }
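A minimal usage sketch of the generated model above; the module import path is an assumption, and only functions defined in this file are used:

package main

import (
	"encoding/json"
	"fmt"

	synthetics "example.com/synthetics" // assumption: module path of this package
)

func main() {
	s := synthetics.NewV202101beta1TestMonitoringSettings()
	s.SetActivationGracePeriod("2")
	s.SetNotificationChannels([]string{"email-channel"})

	// MarshalJSON only emits fields that were set (omitempty-style map build).
	out, _ := json.Marshal(s)
	fmt.Println(string(out))
	// {"activationGracePeriod":"2","notificationChannels":["email-channel"]}
}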
scale_stroke.py
from warnings import warn

import numpy as np

from mizani.palettes import rescale_pal

from ..doctools import document
from ..exceptions import PlotnineWarning
from ..utils import alias
from .scale import scale_discrete, scale_continuous


@document
class scale_stroke_continuous(scale_continuous):
    """
    Continuous Stroke Scale

    Parameters
    ----------
    range : array_like
        Range ([Minimum, Maximum]) of output stroke values.
        Default is ``(1, 6)``.
    {superclass_parameters}
        self.palette = rescale_pal(range)
        scale_continuous.__init__(self, **kwargs)


@document
class scale_stroke_ordinal(scale_discrete):
    """
    Ordinal Stroke Scale

    Parameters
    ----------
    range : array_like
        Range ([Minimum, Maximum]) of output stroke values.
        Default is ``(1, 6)``.
    {superclass_parameters}
    """
    _aesthetics = ['stroke']

    def __init__(self, range=(1, 6), **kwargs):
        def palette(n):
            return np.linspace(range[0], range[1], n)

        self.palette = palette
        scale_discrete.__init__(self, **kwargs)


@document
class scale_stroke_discrete(scale_stroke_ordinal):
    """
    Discrete Stroke Scale

    Parameters
    ----------
    {superclass_parameters}
    """
    _aesthetics = ['stroke']

    def __init__(self, **kwargs):
        warn(
            "Using stroke for a discrete variable is not advised.",
            PlotnineWarning
        )
        super().__init__(**kwargs)


alias('scale_stroke', scale_stroke_continuous)
""" _aesthetics = ['stroke'] def __init__(self, range=(1, 6), **kwargs):
selectableService.js
/** * ag-grid-community - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components * @version v21.0.1 * @link http://www.ag-grid.com/ * @license MIT */ "use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; Object.defineProperty(exports, "__esModule", { value: true }); var context_1 = require("../context/context"); var gridOptionsWrapper_1 = require("../gridOptionsWrapper"); var utils_1 = require("../utils"); var SelectableService = /** @class */ (function () { function
() { } SelectableService.prototype.init = function () { this.groupSelectsChildren = this.gridOptionsWrapper.isGroupSelectsChildren(); this.isRowSelectableFunc = this.gridOptionsWrapper.getIsRowSelectableFunc(); }; SelectableService.prototype.updateSelectableAfterGrouping = function (rowNode) { if (this.isRowSelectableFunc) { var nextChildrenFunc = function (rowNode) { return rowNode.childrenAfterGroup; }; this.recurseDown(rowNode.childrenAfterGroup, nextChildrenFunc); } }; SelectableService.prototype.updateSelectableAfterFiltering = function (rowNode) { if (this.isRowSelectableFunc) { var nextChildrenFunc = function (rowNode) { return rowNode.childrenAfterFilter; }; this.recurseDown(rowNode.childrenAfterGroup, nextChildrenFunc); } }; SelectableService.prototype.recurseDown = function (children, nextChildrenFunc) { var _this = this; children.forEach(function (child) { if (!child.group) { return; } // only interested in groups if (child.hasChildren()) { _this.recurseDown(nextChildrenFunc(child), nextChildrenFunc); } var rowSelectable; if (_this.groupSelectsChildren) { // have this group selectable if at least one direct child is selectable var firstSelectable = utils_1._.find(nextChildrenFunc(child), 'selectable', true); rowSelectable = utils_1._.exists(firstSelectable); } else { // directly retrieve selectable value from user callback rowSelectable = _this.isRowSelectableFunc ? _this.isRowSelectableFunc(child) : false; } child.setRowSelectable(rowSelectable); }); }; __decorate([ context_1.Autowired('gridOptionsWrapper'), __metadata("design:type", gridOptionsWrapper_1.GridOptionsWrapper) ], SelectableService.prototype, "gridOptionsWrapper", void 0); __decorate([ context_1.PostConstruct, __metadata("design:type", Function), __metadata("design:paramtypes", []), __metadata("design:returntype", void 0) ], SelectableService.prototype, "init", null); SelectableService = __decorate([ context_1.Bean('selectableService') ], SelectableService); return SelectableService; }()); exports.SelectableService = SelectableService;
SelectableService
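For context, the service above reads two grid options at init time. A sketch of the options a consumer would pass (option names per the ag-Grid API used here):

// Grid options feeding isRowSelectableFunc / groupSelectsChildren above.
var gridOptions = {
  rowSelection: 'multiple',
  groupSelectsChildren: true,
  // Called per row node; with groupSelectsChildren, a group becomes
  // selectable if at least one direct child is selectable.
  isRowSelectable: function (rowNode) {
    return rowNode.data ? rowNode.data.year < 2007 : false;
  },
};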
utils.rs
use crate::options;

extern crate crossterm;
extern crate termion;

use crossterm::{ execute, terminal };
use std::io::{stdout, Write};

pub fn hide_cursor() {
    if !cfg!(windows) {
        print!("{}", termion::cursor::Hide);
    }
}

pub fn show_cursor() {
    if !cfg!(windows) {
        print!("{}", termion::cursor::Show);
    }
}

pub fn clear() {
    let mut stdout = stdout();
    execute!(stdout, terminal::Clear(terminal::ClearType::All)).unwrap();
    print!("{}", termion::cursor::Goto(1, 1));
}

pub fn center(text: &str, length: u16, cursor_y: u16) -> String {
    let (term_width, _term_height) = termion::terminal_size().unwrap();
    return format!("{}{}", termion::cursor::Goto(term_width / 2 - (length / 2), cursor_y), text);
}

pub fn title(text: &str) -> String {
    return underline(bold(text).as_str());
}

pub fn back_rgb(r: i32, g: i32, b: i32, text: &str) -> String {
    unsafe {
        if options::OPTIONS.color && options::OPTIONS.styling {
            return format!("\x1b[48;2;{};{};{}m{}\x1b[0m", r, g, b, text).to_string();
        } else {
            return text.to_string();
        }
    }
}

pub fn for_rgb(r: i32, g: i32, b: i32, text: &str) -> String {
    unsafe {
        if options::OPTIONS.color && options::OPTIONS.styling {
            return format!("\x1b[38;2;{};{};{}m{}\x1b[0m", r, g, b, text).to_string();
        } else {
            return text.to_string();
        }
    }
}

pub fn underline(text: &str) -> String {
    unsafe {
        if options::OPTIONS.styling {
            return format!("\x1b[4m{}\x1b[0m", text).to_string();
        } else {
            return text.to_string();
        }
    }
}

pub fn bold(text: &str) -> String {
    unsafe {
if options::OPTIONS.styling { return format!("\x1b[1m{}\x1b[0m", text).to_string(); } else { return text.to_string(); } } } pub fn italic(text: &str) -> String { unsafe { if options::OPTIONS.styling { return format!("\x1b[3m{}\x1b[0m", text).to_string(); } else { return text.to_string(); } } } pub fn strikethrough(text: &str) -> String { unsafe { if options::OPTIONS.styling { return format!("\x1b[9m{}\x1b[0m", text).to_string(); } else { return text.to_string(); } } }
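The styling helpers above all follow the same ANSI SGR pattern: `\x1b[<code>m` enables one attribute and `\x1b[0m` resets it (1 = bold, 3 = italic, 4 = underline, 9 = strikethrough). A standalone sketch of that pattern:

fn sgr(code: u8, text: &str) -> String {
    // \x1b[<code>m ... \x1b[0m: enable a single attribute, then reset.
    format!("\x1b[{}m{}\x1b[0m", code, text)
}

fn main() {
    println!("{}", sgr(1, "bold"));
    println!("{}", sgr(4, "underline"));
    println!("{}", sgr(9, "strikethrough"));
}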
add.js
'use strict' var test = require('tape') var fakeTimers = require('@sinonjs/fake-timers') var dbFactory = require('../utils/db') test('has "add" method', function (t) { t.plan(1) var db = dbFactory() var store = db.hoodieApi() t.is(typeof store.add, 'function', 'has method') }) test('adds object to db', function (t) { t.plan(5) var db = dbFactory() var store = db.hoodieApi() store.add({ foo: 'bar' }) .then(function (object) { t.is(object.foo, 'bar', 'resolves value') t.is(typeof object._id, 'string', 'resolves id') t.is(typeof object._rev, 'string', 'resolves _rev') return db.allDocs({ include_docs: true }) }) .then(function (res) { t.is(res.total_rows, 1, 'puts doc') t.is(res.rows[0].doc.foo, 'bar', 'puts value') }) }) test('adds object with id to db', function (t) { t.plan(2) var db = dbFactory() var store = db.hoodieApi() store.add({ _id: 'baz', foo: 'bar' })
}) .then(function (doc) { t.is(doc.foo, 'bar') }) }) test('fails for invalid object', function (t) { t.plan(2) var db = dbFactory() var store = db.hoodieApi() store.add() .catch(function (err) { t.ok(err instanceof Error, 'rejects error') t.is(err.status, 400, 'rejects with error 400') }) }) test('fails for existing object', function (t) { t.plan(2) var db = dbFactory() var store = db.hoodieApi() store.add({ _id: 'foo', foo: 'bar' }) .then(function () { return store.add({ _id: 'foo', foo: 'baz' }) }) .catch(function (err) { t.ok(err instanceof Error, 'rejects error') t.is(err.status, 409, 'rejects status') }) }) test('returns custom conflict error for existing object', function (t) { t.plan(2) var db = dbFactory() var store = db.hoodieApi() store.add({ _id: 'foo', foo: 'bar' }) .then(function () { return store.add({ _id: 'foo', foo: 'baz' }) }) .catch(function (err) { t.is(err.name, 'Conflict', 'custom conflict name') t.is(err.message, 'Object with id "foo" already exists', 'custom error message') }) }) test('adds multiple objects to db', function (t) { t.plan(8) var db = dbFactory() var store = db.hoodieApi() store.add({ _id: 'foo', foo: 'bar' }) .then(function () { return store.add([{ foo: 'bar' }, { foo: 'baz' }, { _id: 'foo', foo: 'baz' }]) }) .then(function (objects) { t.is(objects[0].foo, 'bar', 'resolves first value') t.ok(objects[0]._id, 'resolves first id') t.ok(objects[0]._rev, 'resolves first _rev') t.is(objects[1].foo, 'baz', 'resolves second value') t.ok(objects[1]._id, 'resolves second id') t.ok(objects[1]._rev, 'resolves second _rev') t.ok(objects[2] instanceof Error, 'resolves third w/ error') return db.allDocs() }) .then(function (res) { t.is(res.total_rows, 3, 'puts docs') }) }) test('store.add(object) makes createdAt and updatedAt timestamps', function (t) { t.plan(4) var clock = fakeTimers.install({ now: 0, toFake: ['Date'] }) var db = dbFactory() var store = db.hoodieApi() var now = require('../../lib/utils/now') var isValidDate = require('../utils/is-valid-date') store.add({ _id: 'shouldHaveTimestamps' }) .then(function (object) { t.is(object._id, 'shouldHaveTimestamps', 'resolves doc') t.ok(isValidDate(object.hoodie.createdAt), 'createdAt should be a valid date') t.is(now(), object.hoodie.createdAt, 'createdAt should be the same time as right now') t.is(object.hoodie.updatedAt, undefined, 'updatedAt should be undefined') clock.uninstall() }) }) test('store.add([objects]) makes createdAt and updatedAt timestamps', function (t) { t.plan(8) var clock = fakeTimers.install({ now: 0, toFake: ['Date'] }) var db = dbFactory() var store = db.hoodieApi() var now = require('../../lib/utils/now') var isValidDate = require('../utils/is-valid-date') store.add([{ _id: 'shouldHaveTimestamps' }, { _id: 'shouldAlsoHaveTimestamps' }]) .then(function (objects) { t.is(objects[0]._id, 'shouldHaveTimestamps', 'resolves doc') t.is(objects[1]._id, 'shouldAlsoHaveTimestamps', 'resolves doc') objects.forEach(function (object) { t.ok(isValidDate(object.hoodie.createdAt), 'createdAt should be a valid date') t.is(now(), object.hoodie.createdAt, 'createdAt should be the same time as right now') t.is(object.hoodie.updatedAt, undefined, 'updatedAt should be undefined') }) clock.uninstall() }) }) test('store.add(doc) ignores doc.hoodie properties', function (t) { t.plan(2) var db = dbFactory() var store = db.hoodieApi() var doc = { foo: 'bar', hoodie: { foo: 'bar' } } store.add(doc) .then(function (doc) { t.is(doc.hoodie.foo, undefined) }) t.is(doc.hoodie.foo, 'bar', 'does not alter passed doc') }) 
test('store.add(docs) ignores doc.hoodie properties', function (t) { t.plan(3) var db = dbFactory() var store = db.hoodieApi() var docs = [{ foo: 'bar', hoodie: { foo: 'bar' } }, { foo: 'baz', hoodie: { foo: 'baz' } }] store.add(docs) .then(function (docs) { t.is(docs[0].hoodie.foo, undefined) t.is(docs[1].hoodie.foo, undefined) }) t.is(docs[0].hoodie.foo, 'bar', 'does not alter passed doc') })
.then(function (object) { t.is(object._id, 'baz', 'resolves id') return db.get('baz')
identityserver.go
// Copyright © 2019 The Things Network Foundation, The Things Industries B.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package identityserver import ( "context" "fmt" pbtypes "github.com/gogo/protobuf/types" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/jinzhu/gorm" _ "github.com/jinzhu/gorm/dialects/postgres" // Postgres database driver. "go.thethings.network/lorawan-stack/v3/pkg/account" account_store "go.thethings.network/lorawan-stack/v3/pkg/account/store" "go.thethings.network/lorawan-stack/v3/pkg/auth/rights" "go.thethings.network/lorawan-stack/v3/pkg/cluster" "go.thethings.network/lorawan-stack/v3/pkg/component" "go.thethings.network/lorawan-stack/v3/pkg/email" "go.thethings.network/lorawan-stack/v3/pkg/errors" "go.thethings.network/lorawan-stack/v3/pkg/identityserver/store" "go.thethings.network/lorawan-stack/v3/pkg/interop" "go.thethings.network/lorawan-stack/v3/pkg/log" "go.thethings.network/lorawan-stack/v3/pkg/oauth" "go.thethings.network/lorawan-stack/v3/pkg/redis" "go.thethings.network/lorawan-stack/v3/pkg/rpcmiddleware/hooks" "go.thethings.network/lorawan-stack/v3/pkg/rpcmiddleware/rpclog" "go.thethings.network/lorawan-stack/v3/pkg/ttnpb" "go.thethings.network/lorawan-stack/v3/pkg/webui" "google.golang.org/grpc" ) // IdentityServer implements the Identity Server component. // // The Identity Server exposes the Registry and Access services for Applications, // OAuth clients, Gateways, Organizations and Users. type IdentityServer struct { *component.Component ctx context.Context config *Config db *gorm.DB redis *redis.Client emailTemplates *email.TemplateRegistry account account.Server oauth oauth.Server } // Context returns the context of the Identity Server. func (is *IdentityServer) Context() context.Context { return is.ctx } // SetRedisCache configures the given redis instance for caching. func (is *IdentityServer) SetRedisCache(redis *redis.Client) { is.redis = redis } type ctxKeyType struct{} var ctxKey ctxKeyType func (is *IdentityServer) configFromContext(ctx context.Context) *Config { if config, ok := ctx.Value(ctxKey).(*Config); ok { return config } return is.config } // GenerateCSPString returns a Content-Security-Policy header value // for OAuth and Account app template. func GenerateCSPString(config *oauth.Config, nonce string) string { cspMap := webui.CleanCSP(map[string][]string{ "connect-src": { "'self'", config.UI.StackConfig.IS.BaseURL, config.UI.SentryDSN, "gravatar.com", "www.gravatar.com", }, "style-src": { "'self'", config.UI.AssetsBaseURL, config.UI.BrandingBaseURL, }, "script-src": { "'self'", config.UI.AssetsBaseURL, config.UI.BrandingBaseURL, "'unsafe-eval'", "'strict-dynamic'", fmt.Sprintf("'nonce-%s'", nonce), }, "base-uri": { "'self'", }, "frame-ancestors": { "'none'", }, }) return webui.GenerateCSPString(cspMap) } type accountAppStore struct { db *gorm.DB store.UserStore store.LoginTokenStore store.UserSessionStore } // WithSoftDeleted implements account_store.Interface. 
func (as *accountAppStore) WithSoftDeleted(ctx context.Context, onlyDeleted bool) context.Context { return store.WithSoftDeleted(ctx, onlyDeleted) } // Transact implements account_store.Interface. func (as *accountAppStore) Transact(ctx context.Context, f func(context.Context, account_store.Interface) error) error { return store.Transact(ctx, as.db, func(db *gorm.DB) error { return f(ctx, createAccountAppStore(db)) }) } func createAccountAppStore(db *gorm.DB) account_store.Interface {
var errDBNeedsMigration = errors.Define("db_needs_migration", "the database needs to be migrated")

// New returns new *IdentityServer.
func New(c *component.Component, config *Config) (is *IdentityServer, err error) {
	is = &IdentityServer{
		Component: c,
		ctx:       log.NewContextWithField(c.Context(), "namespace", "identityserver"),
		config:    config,
	}
	is.db, err = store.Open(is.Context(), is.config.DatabaseURI)
	if err != nil {
		return nil, err
	}
	if c.LogDebug() {
		is.db = is.db.Debug()
	}
	if err = store.Check(is.db); err != nil {
		return nil, errDBNeedsMigration.WithCause(err)
	}
	go func() {
		<-is.Context().Done()
		is.db.Close()
	}()

	is.emailTemplates, err = is.initEmailTemplates(is.Context())
	if err != nil {
		return nil, err
	}

	if is.config.DevEUIBlock.Enabled {
		err := store.Transact(is.Context(), is.db, func(db *gorm.DB) error {
			err = store.GetEUIStore(db).CreateEUIBlock(is.Context(), "dev_eui", is.config.DevEUIBlock.Prefix, is.config.DevEUIBlock.InitCounter)
			if err != nil {
				return err
			}
			return nil
		})
		if err != nil {
			return nil, err
		}
	}

	is.config.OAuth.CSRFAuthKey = is.GetBaseConfig(is.Context()).HTTP.Cookie.HashKey
	is.config.OAuth.UI.FrontendConfig.EnableUserRegistration = is.config.UserRegistration.Enabled

	is.oauth, err = oauth.NewServer(c, struct {
		store.UserStore
		store.UserSessionStore
		store.ClientStore
		store.OAuthStore
	}{
		UserStore:        store.GetUserStore(is.db),
		UserSessionStore: store.GetUserSessionStore(is.db),
		ClientStore:      store.GetClientStore(is.db),
		OAuthStore:       store.GetOAuthStore(is.db),
	}, is.config.OAuth, GenerateCSPString)
	if err != nil {
		return nil, err
	}

	is.account, err = account.NewServer(c, createAccountAppStore(is.db), is.config.OAuth, GenerateCSPString)
	if err != nil {
		return nil, err
	}

	c.AddContextFiller(func(ctx context.Context) context.Context {
		ctx = is.withRequestAccessCache(ctx)
		ctx = rights.NewContextWithFetcher(ctx, is)
		ctx = rights.NewContextWithCache(ctx)
		return ctx
	})

	for _, hook := range []struct {
		name       string
		middleware hooks.UnaryHandlerMiddleware
	}{
		{rpclog.NamespaceHook, rpclog.UnaryNamespaceHook("identityserver")},
		{cluster.HookName, c.ClusterAuthUnaryHook()},
	} {
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.Is", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.ApplicationRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.ApplicationAccess", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.ClientRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.ClientAccess", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.EndDeviceRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.GatewayRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.GatewayAccess", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.OrganizationRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.OrganizationAccess", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.UserRegistry", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.UserAccess", hook.name, hook.middleware)
		hooks.RegisterUnaryHook("/ttn.lorawan.v3.UserSessionRegistry", hook.name, hook.middleware)
	}
	hooks.RegisterUnaryHook("/ttn.lorawan.v3.EntityAccess", rpclog.NamespaceHook, rpclog.UnaryNamespaceHook("identityserver"))
	hooks.RegisterUnaryHook("/ttn.lorawan.v3.EntityAccess", cluster.HookName, c.ClusterAuthUnaryHook())
	hooks.RegisterUnaryHook("/ttn.lorawan.v3.OAuthAuthorizationRegistry", rpclog.NamespaceHook,
rpclog.UnaryNamespaceHook("identityserver")) c.RegisterGRPC(is) c.RegisterWeb(is.oauth) c.RegisterWeb(is.account) c.RegisterInterop(is) return is, nil } func (is *IdentityServer) withDatabase(ctx context.Context, f func(*gorm.DB) error) error { return store.Transact(ctx, is.db, f) } // RegisterServices registers services provided by is at s. func (is *IdentityServer) RegisterServices(s *grpc.Server) { ttnpb.RegisterIsServer(s, is) ttnpb.RegisterEntityAccessServer(s, &entityAccess{IdentityServer: is}) ttnpb.RegisterApplicationRegistryServer(s, &applicationRegistry{IdentityServer: is}) ttnpb.RegisterApplicationAccessServer(s, &applicationAccess{IdentityServer: is}) ttnpb.RegisterClientRegistryServer(s, &clientRegistry{IdentityServer: is}) ttnpb.RegisterClientAccessServer(s, &clientAccess{IdentityServer: is}) ttnpb.RegisterEndDeviceRegistryServer(s, &endDeviceRegistry{IdentityServer: is}) ttnpb.RegisterGatewayRegistryServer(s, &gatewayRegistry{IdentityServer: is}) ttnpb.RegisterGatewayAccessServer(s, &gatewayAccess{IdentityServer: is}) ttnpb.RegisterOrganizationRegistryServer(s, &organizationRegistry{IdentityServer: is}) ttnpb.RegisterOrganizationAccessServer(s, &organizationAccess{IdentityServer: is}) ttnpb.RegisterUserRegistryServer(s, &userRegistry{IdentityServer: is}) ttnpb.RegisterUserAccessServer(s, &userAccess{IdentityServer: is}) ttnpb.RegisterUserSessionRegistryServer(s, &userSessionRegistry{IdentityServer: is}) ttnpb.RegisterUserInvitationRegistryServer(s, &invitationRegistry{IdentityServer: is}) ttnpb.RegisterEntityRegistrySearchServer(s, &registrySearch{IdentityServer: is}) ttnpb.RegisterEndDeviceRegistrySearchServer(s, &registrySearch{IdentityServer: is}) ttnpb.RegisterOAuthAuthorizationRegistryServer(s, &oauthRegistry{IdentityServer: is}) ttnpb.RegisterContactInfoRegistryServer(s, &contactInfoRegistry{IdentityServer: is}) } // RegisterHandlers registers gRPC handlers. func (is *IdentityServer) RegisterHandlers(s *runtime.ServeMux, conn *grpc.ClientConn) { ttnpb.RegisterIsHandler(is.Context(), s, conn) ttnpb.RegisterEntityAccessHandler(is.Context(), s, conn) ttnpb.RegisterApplicationRegistryHandler(is.Context(), s, conn) ttnpb.RegisterApplicationAccessHandler(is.Context(), s, conn) ttnpb.RegisterClientRegistryHandler(is.Context(), s, conn) ttnpb.RegisterClientAccessHandler(is.Context(), s, conn) ttnpb.RegisterEndDeviceRegistryHandler(is.Context(), s, conn) ttnpb.RegisterGatewayRegistryHandler(is.Context(), s, conn) ttnpb.RegisterGatewayAccessHandler(is.Context(), s, conn) ttnpb.RegisterOrganizationRegistryHandler(is.Context(), s, conn) ttnpb.RegisterOrganizationAccessHandler(is.Context(), s, conn) ttnpb.RegisterUserRegistryHandler(is.Context(), s, conn) ttnpb.RegisterUserAccessHandler(is.Context(), s, conn) ttnpb.RegisterUserSessionRegistryHandler(is.Context(), s, conn) ttnpb.RegisterUserInvitationRegistryHandler(is.Context(), s, conn) ttnpb.RegisterEntityRegistrySearchHandler(is.Context(), s, conn) ttnpb.RegisterEndDeviceRegistrySearchHandler(is.Context(), s, conn) ttnpb.RegisterOAuthAuthorizationRegistryHandler(is.Context(), s, conn) ttnpb.RegisterContactInfoRegistryHandler(is.Context(), s, conn) } // RegisterInterop registers the LoRaWAN Backend Interfaces interoperability services. func (is *IdentityServer) RegisterInterop(srv *interop.Server) { srv.RegisterIS(&interopServer{IdentityServer: is}) } // Roles returns the roles that the Identity Server fulfills. 
func (is *IdentityServer) Roles() []ttnpb.ClusterRole { return []ttnpb.ClusterRole{ttnpb.ClusterRole_ACCESS, ttnpb.ClusterRole_ENTITY_REGISTRY} } func (is *IdentityServer) getMembershipStore(ctx context.Context, db *gorm.DB) store.MembershipStore { s := store.GetMembershipStore(db) if is.redis != nil { if membershipTTL := is.configFromContext(ctx).AuthCache.MembershipTTL; membershipTTL > 0 { s = store.GetMembershipCache(s, is.redis, membershipTTL) } } return s } var softDeleteFieldMask = &pbtypes.FieldMask{Paths: []string{"deleted_at"}} var errRestoreWindowExpired = errors.DefineFailedPrecondition("restore_window_expired", "this entity can no longer be restored")
return &accountAppStore{ db: db, UserStore: store.GetUserStore(db), LoginTokenStore: store.GetLoginTokenStore(db), UserSessionStore: store.GetUserSessionStore(db), } }
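The accountAppStore above narrows the full store to just the interfaces the account app needs by embedding them. A minimal standalone sketch of that composition pattern (the interface and type names here are illustrative):

package main

import "fmt"

type UserStore interface{ GetUser(id string) string }
type SessionStore interface{ GetSession(id string) string }

type users struct{}

func (users) GetUser(id string) string { return "user:" + id }

type sessions struct{}

func (sessions) GetSession(id string) string { return "session:" + id }

// accountStore exposes only what the account app needs, by embedding
// the narrower interfaces; callers cannot reach the rest of the store.
type accountStore struct {
	UserStore
	SessionStore
}

func main() {
	s := accountStore{UserStore: users{}, SessionStore: sessions{}}
	fmt.Println(s.GetUser("1"), s.GetSession("2"))
}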
lib.rs
//! QUIC transport protocol support for Tokio //! //! [QUIC](https://en.wikipedia.org/wiki/QUIC) is a modern transport protocol addressing //! shortcomings of TCP, such as head-of-line blocking, poor security, slow handshakes, and //! inefficient congestion control. This crate provides a portable userspace implementation. It //! builds on top of quinn-proto, which implements protocol logic independent of any particular //! runtime. //! //! The entry point of this crate is the [`Endpoint`](generic/struct.Endpoint.html). //! #![cfg_attr( feature = "rustls", doc = "```no_run # use futures::TryFutureExt; let mut builder = quinn::Endpoint::builder(); // ... configure builder ... // Ensure you're inside a tokio runtime context let (endpoint, _) = builder.bind(&\"[::]:0\".parse().unwrap()).unwrap(); // ... use endpoint ... ```" )] //! # About QUIC //! //! A QUIC connection is an association between two endpoints. The endpoint which initiates the //! connection is termed the client, and the endpoint which accepts it is termed the server. A //! single endpoint may function as both client and server for different connections, for example //! in a peer-to-peer application. To communicate application data, each endpoint may open streams //! up to a limit dictated by its peer. Typically, that limit is increased as old streams are //! finished. //! //! Streams may be unidirectional or bidirectional, and are cheap to create and disposable. For //! example, a traditionally datagram-oriented application could use a new stream for every //! message it wants to send, no longer needing to worry about MTUs. Bidirectional streams behave //! much like a traditional TCP connection, and are useful for sending messages that have an //! immediate response, such as an HTTP request. Stream data is delivered reliably, and there is no //! ordering enforced between data on different streams. //! //! By avoiding head-of-line blocking and providing unified congestion control across all streams //! of a connection, QUIC is able to provide higher throughput and lower latency than one or //! multiple TCP connections between the same two hosts, while providing more useful behavior than //! raw UDP sockets. //! //! Quinn also exposes unreliable datagrams, which are a low-level primitive preferred when //! automatic fragmentation and retransmission of certain data is not desired. //! //! QUIC uses encryption and identity verification built directly on TLS 1.3. Just as with a TLS //! server, it is useful for a QUIC server to be identified by a certificate signed by a trusted //! authority. If this is infeasible--for example, if servers are short-lived or not associated //! with a domain name--then as with TLS, self-signed certificates can be used to provide //! encryption alone. 
#![warn(missing_docs)] mod broadcast; mod builders; mod connection; mod endpoint; mod mutex; mod platform; mod recv_stream; mod send_stream; pub use proto::{ crypto, ApplicationClose, Certificate, CertificateChain, Chunk, ConnectError, ConnectionClose, ConnectionError, ParseError, PrivateKey, StreamId, Transmit, TransportConfig, VarInt, }; pub use crate::builders::EndpointError; pub use crate::connection::{SendDatagramError, ZeroRttAccepted}; pub use crate::recv_stream::{ReadError, ReadExactError, ReadToEndError}; pub use crate::send_stream::{StoppedError, WriteError}; /// Types that are generic over the crypto protocol implementation pub mod generic { pub use crate::builders::{ClientConfigBuilder, EndpointBuilder, ServerConfigBuilder}; pub use crate::connection::{ Connecting, Connection, Datagrams, IncomingBiStreams, IncomingUniStreams, NewConnection, OpenBi, OpenUni, }; pub use crate::endpoint::{Endpoint, Incoming}; pub use crate::recv_stream::{Read, ReadChunk, ReadChunks, ReadExact, ReadToEnd, RecvStream}; pub use crate::send_stream::SendStream; pub use proto::generic::{ClientConfig, ServerConfig}; } #[cfg(feature = "rustls")] mod rustls_impls { use crate::generic; use proto::crypto::rustls::TlsSession; /// A `ClientConfig` using rustls for the cryptography protocol pub type ClientConfig = generic::ClientConfig<TlsSession>; /// A `ServerConfig` using rustls for the cryptography protocol pub type ServerConfig = generic::ServerConfig<TlsSession>; /// A `ClientConfigBuilder` using rustls for the cryptography protocol pub type ClientConfigBuilder = generic::ClientConfigBuilder<TlsSession>; /// An `EndpointBuilder` using rustls for the cryptography protocol pub type EndpointBuilder = generic::EndpointBuilder<TlsSession>; /// A `ServerConfigBuilder` using rustls for the cryptography protocol pub type ServerConfigBuilder = generic::ServerConfigBuilder<TlsSession>; /// A `Connecting` using rustls for the cryptography protocol pub type Connecting = generic::Connecting<TlsSession>; /// A `Connection` using rustls for the cryptography protocol pub type Connection = generic::Connection<TlsSession>; /// A `Datagrams` using rustls for the cryptography protocol pub type Datagrams = generic::Datagrams<TlsSession>; /// An `IncomingBiStreams` using rustls for the cryptography protocol pub type IncomingBiStreams = generic::IncomingBiStreams<TlsSession>; /// An `IncomingUniStreams` using rustls for the cryptography protocol pub type IncomingUniStreams = generic::IncomingUniStreams<TlsSession>; /// A `NewConnection` using rustls for the cryptography protocol pub type NewConnection = generic::NewConnection<TlsSession>; /// An `OpenBi` using rustls for the cryptography protocol pub type OpenBi = generic::OpenBi<TlsSession>; /// An `OpenUni` using rustls for the cryptography protocol pub type OpenUni = generic::OpenUni<TlsSession>; /// An `Endpoint` using rustls for the cryptography protocol pub type Endpoint = generic::Endpoint<TlsSession>; /// An `Incoming` using rustls for the cryptography protocol pub type Incoming = generic::Incoming<TlsSession>; /// A `Read` using rustls for the cryptography protocol pub type Read<'a> = generic::Read<'a, TlsSession>; /// A `ReadExact` using rustls for the cryptography protocol pub type ReadExact<'a> = generic::ReadExact<'a, TlsSession>; /// A `ReadToEnd` using rustls for the cryptography protocol pub type ReadToEnd = generic::ReadToEnd<TlsSession>; /// A `RecvStream` using rustls for the cryptography protocol pub type RecvStream = generic::RecvStream<TlsSession>; /// A 
`SendStream` using rustls for the cryptography protocol pub type SendStream = generic::SendStream<TlsSession>; } #[cfg(feature = "rustls")] pub use rustls_impls::*; #[cfg(test)] mod tests; #[derive(Debug)] enum ConnectionEvent { Close { error_code: VarInt, reason: bytes::Bytes, }, Proto(proto::ConnectionEvent), } #[derive(Debug)] enum
{ Proto(proto::EndpointEvent), Transmit(proto::Transmit), } /// Maximum number of send/recv calls to make before moving on to other processing /// /// This helps ensure we don't starve anything when the CPU is slower than the link. Value selected /// more or less arbitrarily. const IO_LOOP_BOUND: usize = 10;
EndpointEvent
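A hedged client-side sketch against the API surface documented above (rustls feature enabled; the server address, certificate handling, and runtime setup are assumptions, not part of this file):

async fn connect_demo() -> Result<(), Box<dyn std::error::Error>> {
    let mut builder = quinn::Endpoint::builder();
    builder.default_client_config(quinn::ClientConfigBuilder::default().build());

    // Bind an ephemeral local socket (must run inside a tokio runtime).
    let (endpoint, _incoming) = builder.bind(&"[::]:0".parse()?)?;

    // Dial the (assumed) server; "localhost" is the expected certificate name.
    let quinn::NewConnection { connection, .. } =
        endpoint.connect(&"[::1]:4433".parse()?, "localhost")?.await?;

    // Streams are cheap: open one bidirectional stream per request.
    let (mut send, _recv) = connection.open_bi().await?;
    send.write_all(b"hello").await?;
    send.finish().await?;
    Ok(())
}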
decode.go
package client import ( "encoding/json" "errors" "fmt" tmjson "github.com/mydexchain/tendermint/libs/json" types "github.com/mydexchain/tendermint/rpc/jsonrpc/types" ) func unmarshalResponseBytes( responseBytes []byte, expectedID types.JSONRPCIntID, result interface{}, ) (interface{}, error) { // Read response. If rpc/core/types is imported, the result will unmarshal // into the correct type. response := &types.RPCResponse{} if err := json.Unmarshal(responseBytes, response); err != nil { return nil, fmt.Errorf("error unmarshalling: %w", err) } if response.Error != nil { return nil, response.Error } if err := validateAndVerifyID(response, expectedID); err != nil { return nil, fmt.Errorf("wrong ID: %w", err) } // Unmarshal the RawMessage into the result. if err := tmjson.Unmarshal(response.Result, result); err != nil { return nil, fmt.Errorf("error unmarshalling result: %w", err) } return result, nil } func unmarshalResponseBytesArray( responseBytes []byte, expectedIDs []types.JSONRPCIntID, results []interface{}, ) ([]interface{}, error)
func validateResponseIDs(ids, expectedIDs []types.JSONRPCIntID) error { m := make(map[types.JSONRPCIntID]bool, len(expectedIDs)) for _, expectedID := range expectedIDs { m[expectedID] = true } for i, id := range ids { if m[id] { delete(m, id) } else { return fmt.Errorf("unsolicited ID #%d: %v", i, id) } } return nil } // From the JSON-RPC 2.0 spec: // id: It MUST be the same as the value of the id member in the Request Object. func validateAndVerifyID(res *types.RPCResponse, expectedID types.JSONRPCIntID) error { if err := validateResponseID(res.ID); err != nil { return err } if expectedID != res.ID.(types.JSONRPCIntID) { // validateResponseID ensured res.ID has the right type return fmt.Errorf("response ID (%d) does not match request ID (%d)", res.ID, expectedID) } return nil } func validateResponseID(id interface{}) error { if id == nil { return errors.New("no ID") } _, ok := id.(types.JSONRPCIntID) if !ok { return fmt.Errorf("expected JSONRPCIntID, but got: %T", id) } return nil }
{
	var responses []types.RPCResponse
	if err := json.Unmarshal(responseBytes, &responses); err != nil {
		return nil, fmt.Errorf("error unmarshalling: %w", err)
	}

	// No response error checking here as there may be a mixture of successful
	// and unsuccessful responses.

	if len(results) != len(responses) {
		return nil, fmt.Errorf(
			"expected %d result objects into which to inject responses, but got %d",
			len(responses),
			len(results),
		)
	}

	// Intersect IDs from responses with expectedIDs.
	ids := make([]types.JSONRPCIntID, len(responses))
	var ok bool
	for i, resp := range responses {
		ids[i], ok = resp.ID.(types.JSONRPCIntID)
		if !ok {
			return nil, fmt.Errorf("expected JSONRPCIntID, got %T", resp.ID)
		}
	}
	if err := validateResponseIDs(ids, expectedIDs); err != nil {
		return nil, fmt.Errorf("wrong IDs: %w", err)
	}

	for i := 0; i < len(responses); i++ {
		if err := tmjson.Unmarshal(responses[i].Result, results[i]); err != nil {
			return nil, fmt.Errorf("error unmarshalling #%d result: %w", i, err)
		}
	}
	return results, nil
}
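The comment on `validateAndVerifyID` cites the JSON-RPC 2.0 rule that a response `id` MUST equal the request `id`; that is what lets a client match batched replies back to their requests even when they arrive out of order. A self-contained sketch of the same check using only the standard library (`rpcResponse` here is a simplified stand-in for `types.RPCResponse`, not the real type):

package main

import (
	"encoding/json"
	"fmt"
)

// rpcResponse is a minimal stand-in for a JSON-RPC 2.0 response envelope.
type rpcResponse struct {
	JSONRPC string          `json:"jsonrpc"`
	ID      int64           `json:"id"`
	Result  json.RawMessage `json:"result"`
}

func main() {
	const requestID = int64(7)
	raw := []byte(`{"jsonrpc":"2.0","id":7,"result":{"height":42}}`)

	var resp rpcResponse
	if err := json.Unmarshal(raw, &resp); err != nil {
		panic(err)
	}
	// Mirror validateAndVerifyID: reject unsolicited or mismatched replies.
	if resp.ID != requestID {
		panic(fmt.Sprintf("response ID (%d) does not match request ID (%d)", resp.ID, requestID))
	}
	fmt.Printf("matched response for request %d: %s\n", requestID, resp.Result)
}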
test_ip_manager.py
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

from conftest import Mock
import responses


class TestIP(object):
    @responses.activate
    def test_get_ip(self, manager):
        data = Mock.mock_get('ip_address/10.1.0.101')
        ip_addr = manager.get_ip('10.1.0.101')
        assert type(ip_addr).__name__ == 'IPAddress'
        assert ip_addr.address == '10.1.0.101'
        assert ip_addr.ptr_record == 'a.ptr.record'

    @responses.activate
        for ip_addr in ip_addrs:
            assert type(ip_addr).__name__ == 'IPAddress'

    @responses.activate
    def test_modify_ip_oop(self, manager):
        # get ip
        data = Mock.mock_get('ip_address/10.1.0.101')
        ip_addr = manager.get_ip('10.1.0.101')

        # put ip
        data = Mock.mock_put('ip_address/10.1.0.101')
        ip_addr.ptr_record = 'my.ptr.record'
        ip_addr.save()
        assert ip_addr.ptr_record == 'my.ptr.record'

    @responses.activate
    def test_modify_ip(self, manager):
        data = Mock.mock_put('ip_address/10.1.0.101')
        ip_addr = manager.modify_ip('10.1.0.101', ptr_record='my.ptr.record')
        assert ip_addr.ptr_record == 'my.ptr.record'

    @responses.activate
    def test_ip_delete(self, manager):
        Mock.mock_delete('ip_address/10.1.0.101')
        res = manager.release_ip('10.1.0.101')
        assert res == {}
    def test_get_ips(self, manager):
        data = Mock.mock_get('ip_address')
        ip_addrs = manager.get_ips()
app.7678959f9b17a348c1c8.js
webpackJsonp([1],{"36b/":function(t,e){},"43Mc":function(t,e){},"811s":function(t,e){},"N+zL":function(t,e,i){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var a={render:function(){var t=this.$createElement;return(this._self._c||t)("div",{class:this.slideClass},[this._t("default")],2)},staticRenderFns:[]},s=i("VU/8")({name:"swiper-slide",data:function(){return{slideClass:"swiper-slide"}},ready:function(){this.update()},mounted:function(){this.update(),this.$parent.options.slideClass&&(this.slideClass=this.$parent.options.slideClass)},updated:function(){this.update()},attached:function(){this.update()},methods:{update:function(){this.$parent&&this.$parent.swiper&&this.$parent.swiper.update&&(this.$parent.swiper.update(!0),this.$parent.options.loop&&this.$parent.swiper.reLoop())}}},a,!1,null,null,null);e.default=s.exports},NHnr:function(t,e,i){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var a=i("7+uW"),s={render:function(){var t=this.$createElement,e=this._self._c||t;return e("div",{attrs:{id:"app"}},[e("keep-alive",[e("router-view")],1)],1)},staticRenderFns:[]};var n=i("VU/8")({name:"App"},s,!1,function(t){i("qfNC")},null,null).exports,c=i("/ocq"),l={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",[i("el-menu",[i("a",{staticClass:"title",attrs:{href:"/"}},[i("el-menu-item",{attrs:{index:"1"}},[i("i",{staticClass:"iconfont iconblogger"}),t._v(" "),i("span",{attrs:{slot:"title"},slot:"title"},[t._v("关于我")])])],1),t._v(" "),i("a",{staticClass:"title",attrs:{href:"https://github.com/NCR7777",target:"_blank"}},[i("el-menu-item",{attrs:{index:"2"}},[i("i",{staticClass:"iconfont icongithub"}),t._v(" "),i("span",{attrs:{slot:"title"},slot:"title"},[t._v("Github")])])],1),t._v(" "),i("a",{staticClass:"title",attrs:{href:"http://www.ncrlhym.cn",target:"_blank"}},[i("el-menu-item",{attrs:{index:"3"}},[i("i",{staticClass:"iconfont iconicon_Notepad"}),t._v(" "),i("span",{staticClass:"title",attrs:{slot:"title"},slot:"title"},[t._v("我的博客")])])],1),t._v(" "),i("a",{staticClass:"title",attrs:{href:"mailto: [email protected]"}},[i("el-menu-item",{attrs:{index:"4"}},[i("i",{staticClass:"iconfont iconicon_Email"}),t._v(" "),i("span",{staticClass:"title",attrs:{slot:"title"},slot:"title"},[t._v("联系我")])])],1)])],1)},staticRenderFns:[]};var o={name:"IndexHeaderMobile",data:function(){return{showHeader:!1,showMenu:!1,clickShowMenu:!1,num:0,menuIconColorClass:"white",opacityStyle:{opacity:0}}},components:{MenuMobile:i("VU/8")({name:"MenuMobile"},l,!1,function(t){i("zZKr")},"data-v-5a8f1d62",null).exports},methods:{handleShowMenu:function(){this.showMenu||(this.num=1),this.showMenu=!this.showMenu,this.menuIconColorClass=this.clickShowMenu&&this.showMenu?"blue":"white"},handleCloseMenu:function(){1===this.num?this.clickShowMenu=!0:(this.showMenu=!1,this.clickShowMenu=!1),this.num=0,this.menuIconColorClass=this.clickShowMenu&&this.showMenu?"blue":"white"},handleScroll:function(){var t=document.documentElement.scrollTop,e=0;t>100?(this.showHeader=!0,e=e<1?(t-100)/200:1):this.showHeader=!1,this.opacityStyle={opacity:e}}},directives:{clickOutSide:{bind:function(t,e,i){t._vueClickOutSide_=function(i){if(t.contains(i.target))return!1;e.expression&&e.value(i)},document.addEventListener("click",t._vueClickOutSide_)},unbind:function(t,e){document.removeEventListener("click",t._vueClickOutSide_),delete 
t._vueClickOutSide_}}},activated:function(){this.showHeader=!1,window.addEventListener("scroll",this.handleScroll)},deactivated:function(){window.removeEventListener("scroll",this.handleScroll)}},h={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",{staticClass:"hidden-sm-and-up top"},[i("div",{directives:[{name:"show",rawName:"v-show",value:t.showHeader,expression:"showHeader"}],staticClass:"header",style:t.opacityStyle},[t._m(0),t._v(" "),i("i",{class:["el-icon-s-operation","menu",t.menuIconColorClass],on:{click:t.handleShowMenu}})]),t._v(" "),i("div",{staticClass:"menu-list",style:t.opacityStyle},[i("transition",{attrs:{"enter-active-class":"animated bounceInDown","leave-active-class":"animated bounceOutUp"}},[i("menu-mobile",{directives:[{name:"show",rawName:"v-show",value:t.clickShowMenu&&t.showMenu,expression:"clickShowMenu && showMenu"},{name:"click-out-side",rawName:"v-click-out-side",value:t.handleCloseMenu,expression:"handleCloseMenu"}]})],1)],1)])},staticRenderFns:[function(){var t=this.$createElement,e=this._self._c||t;return e("span",{staticClass:"title"},[this._v("那纯瑞 | "),e("span",{staticClass:"small"},[this._v("西安电子科技大学")])])}]};var r={name:"IndexHeaderPc",data:function(){return{screenHeight:document.documentElement.clientHeight,headerBg:"",headerHeight:"70px",timer:!1,menuList:[{desc:"关于我",url:""},{desc:"我的Github",url:"https://github.com/NCR7777",target:"_blank"},{desc:"我的博客",url:"http://www.ncrlhym.cn",target:"_blank"},{desc:"联系我",url:"mailto: [email protected]"}]}},methods:{handleScroll:function(){this.screenHeight=document.documentElement.clientHeight,document.documentElement.scrollTop>=this.screenHeight-50?(this.headerBg="#3f424b",this.headerHeight="50px"):(this.headerBg="",this.headerHeight="70px")}},activated:function(){window.addEventListener("scroll",this.handleScroll)},deactivated:function(){window.removeEventListener("scroll",this.handleScroll)}},d={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",{staticClass:"hidden-xs-only header",style:{background:t.headerBg,lineHeight:t.headerHeight}},[i("el-row",[i("el-col",{attrs:{sm:1,md:3,lg:4,xl:4}},[t._v(" ")]),t._v(" "),i("el-col",{attrs:{sm:22,md:18,lg:16,xl:16}},[i("span",{staticClass:"title"},[i("span",{staticClass:"name"},[i("b",[t._v("那纯瑞")])]),t._v(" | 西安电子科技大学")]),t._v(" "),i("div",{staticClass:"menu"},t._l(t.menuList,function(e,a){return i("span",{key:a},[i("a",{staticClass:"menu-item",attrs:{href:e.url,target:e.target}},[t._v(t._s(e.desc))])])}),0)])],1)],1)},staticRenderFns:[]};var v={name:"IndexInfoMobile",data:function(){return{swiperOption:{pagination:".swiper-pagination",autoplay:5e3,autoplayDisableOnInteraction:!1,mousewheelControl:!0,observeParents:!0,loop:!0},myName:"那纯瑞",screenHeight:document.documentElement.clientHeight-50,timer:!1,photoClass:"about-me-new",aboutMe:[{myInfo:"联系方式",children:[{icon:"iconicon_Call",desc:'电话 <a href="tel: 18629488674">186-294-88674</a> | <a href="tel: 18845155191">188-451-55191</a>'},{icon:"iconicon_Email",desc:'邮箱 <a href="mailto: [email protected]">[email protected]</a>'}]},{myInfo:"个人信息",children:[{icon:"iconicon_User",desc:"那纯瑞 | 男 | 1999年1月19日"},{icon:"iconicon_Read",desc:"西安电子科技大学 | 电子商务 | 本科应届"},{icon:"iconicon_Folder",desc:'期望职位:<span style="color: red">前端开发/技术支持(全国)</span>'},{icon:"icongithub",desc:'我的Github:<a href="https://github.com/NCR7777" target="_blank">https://github.com/NCR7777</a>'},{icon:"iconblogger",desc:'我的博客:<a href="http://www.ncrlhym.cn" 
target="_blank">http://www.ncrlhym.cn</a>'}]},{myInfo:"技能掌握",children:[{iconSvg:[{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconVue"></use></svg>',skillDesc:"Vue"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconHTML"></use></svg>',skillDesc:"HTML5"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconCSS"></use></svg>',skillDesc:"CSS3"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconJavaScript"></use></svg>',skillDesc:"JavaScript"},{iconName:'<img src="/static/images/es6.jpg" alt="ES6" style="width: .8rem">',skillDesc:"ES6"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#icongit"></use></svg>',skillDesc:"Git"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#icongitlab"></use></svg>',skillDesc:"GitLab"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconjQuery"></use></svg>',skillDesc:"jQuery"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconMySQL"></use></svg>',skillDesc:"MySQL"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconNodejs"></use></svg>',skillDesc:"Nodejs"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconBootstrap"></use></svg>',skillDesc:"Bootstrap"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconAdobe-Photoshop"></use></svg>',skillDesc:"PhotoShop"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconwebstorm"></use></svg>',skillDesc:"WebStorm"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconAdobe-Dreamweaver"></use></svg>',skillDesc:"Dreamweaver"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconMicrosoft-Word"></use></svg>',skillDesc:"Word"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconMicrosoft-Powerpoint"></use></svg>',skillDesc:"Powerpoint"},{iconName:'<svg class="icon" aria-hidden="true"> <use xlink:href="#iconMicrosoft-Excel"></use></svg>',skillDesc:"Excel"}]}]},{myInfo:"项目作品",children:[{icon:"iconicon_Favorite",projectDesc:'个人博客:<a href="http://www.ncrlhym.cn" target="_blank">那纯瑞的博客</a><br/>Node.JS+Express框架+mysql数据库等<br/>登陆注册,整合富文本编辑器、文章展示,评论管理等'},{icon:"iconicon_Favorite",projectDesc:'仿去哪网:<a href="./qunar/index.html" target="_blank">仿去哪网</a> <span style="color: red">(网速较慢)</span><br/>Vue,Vuex,Vue-Router,Axios,swiper,stylus等<br/>取消默认样式、适配移动端,路由配置,组件化开发,轮播效果,图标分页,组件间传值,keep-alive滚动事件,全局事件解绑等'},{icon:"iconicon_Favorite",projectDesc:'页面特效:<a href="./pages.html" target="_blank">请使用电脑访问</a> <span style="color: red">(部分不适配手机)</span><br/>html,css等<br/>css3动画,flex布局,box-shadow阴影等'},{icon:"iconicon_Favorite",projectDesc:'二次开发ecshop:<a href="https://www.kuailehaigou.com" target="_blank">拾贝E购</a> <span style="color: red">(请勿下单)</span><br/>PHP+阿里云ECS+宝塔面板等<br/>修复商家入驻页面跳转bug,按照需求删除无用信息以及部分内容的修改,域名备案,SSL证书布置等'},{icon:"iconicon_Favorite",projectDesc:'电影网站首页:<a href="http://www.hawgreencar.club" target="_blank">Movie</a> <span style="color: red">(仅一个页面)</span><br/>html,css,js,jquery等<br/>轮播图,css3动画等'}]}]}},methods:{calculateHeight:function(){this.screenHeight<=640?this.photoClass="about-me-old":this.photoClass="about-me-new"}},computed:{pages:function(){var t=[];return this.aboutMe[2].children[0].iconSvg.forEach(function(e,i){var a=Math.floor(i/8);t[a]||(t[a]=[]),t[a].push(e)}),t}},mounted:function(){this.calculateHeight();var t=this;window.onresize=function(){return 
window.screenHeight=document.documentElement.clientHeight-50,void(t.screenHeight=window.screenHeight)}},watch:{screenHeight:function(t){if(!this.timer){this.screenHeight=t,this.calculateHeight(),this.timer=!0;var e=this;setTimeout(function(){e.timer=!1},400)}}}},m={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",{staticClass:"hidden-sm-and-up"},[i("div",{class:t.photoClass},[t._m(0),t._v(" "),i("div",{staticClass:"name animated heartBeat"},[t._v(t._s(t.myName))]),t._v(" "),t._l(t.aboutMe,function(e,a){return i("div",{key:a,staticClass:"info animated zoomIn"},[i("div",{staticClass:"info-title border-bottom"},[i("div",{staticClass:"left-mark"}),t._v(" "),i("div",{staticClass:"info-title-desc"},[t._v(t._s(e.myInfo))])]),t._v(" "),i("ul",{staticClass:"item"},t._l(e.children,function(e,a){return i("li",{key:a,staticClass:"item-desc"},[e.iconSvg?i("div",{staticClass:"icon-svg"},[i("swiper",{attrs:{options:t.swiperOption}},[t._l(t.pages,function(e,a){return i("swiper-slide",{key:a},t._l(e,function(e,a){return i("div",{key:a,staticClass:"icon-svg-item"},[i("div",{staticClass:"icon-svg-item-iconName",domProps:{innerHTML:t._s(e.iconName)}},[t._v(t._s(e.iconName))]),t._v(" "),i("div",{staticClass:"icon-svg-item-skillDesc"},[t._v(t._s(e.skillDesc))])])}),0)}),t._v(" "),i("div",{staticClass:"swiper-pagination",attrs:{slot:"pagination"},slot:"pagination"})],2)],1):t._e(),t._v(" "),e.icon?i("span",{class:["iconfont",e.icon]}):t._e(),t._v(" "),e.desc?i("span",{staticClass:"item-normal-desc",domProps:{innerHTML:t._s(e.desc)}},[t._v(t._s(e.desc))]):t._e(),t._v(" "),e.projectDesc?i("span",{staticClass:"item-project-desc",domProps:{innerHTML:t._s(e.projectDesc)}},[t._v(t._s(e.projectDesc))]):t._e()])}),0)])})],2)])},staticRenderFns:[function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"my-photo animated zoomIn"},[e("div",{staticClass:"photo"})])}]};var u={render:function(){this.$createElement;this._self._c;return this._m(0)},staticRenderFns:[function(){var t=this.$createElement,e=this._self._c||t;return e("div",[e("div",{staticClass:"hidden-sm-and-up footer-mobile"},[this._v("© 那纯瑞 | 西安电子科技大学")]),this._v(" "),e("div",{staticClass:"hidden-xs-only footer-PC"},[this._v("© 那纯瑞 | 西安电子科技大学")])])}]};var p={name:"IndexInfoPc",data:function(){return{infoTitleHeight:"6%",infoItemHeight:"",screenHeight:document.documentElement.clientHeight,screenWidth:document.documentElement.clientWidth,timer:!1,timerWidth:!1,myInfoList:[{icon:"iconicon_User",title:"个人信息",desc:"<br>那纯瑞 | 男<br>1999年1月19日<br>西安电子科技大学<br>电子商务 | 本科应届"},{icon:"iconicon_Call",title:"联系方式",desc:'<br>电话<br><a href="tel: [email protected]">186-294-88674</a><br>邮箱<br><a href="mailto: [email protected]">[email protected]</a>'},{icon:"icongithub",title:"个人网站",desc:'<br>Github<br><a href="https://github.com/NCR7777" target="_blank">github.com/NCR7777</a><br>个人博客(无文章)<br><a href="http://www.ncrlhym.cn" target="_blank">www.ncrlhym.cn</a>'},{icon:"iconai-code",title:"程序人生",desc:"<br>HTML/CSS/JS: 8000+行<br>Vue: 2000+行<br>Node.Js: 2000+行<br>Java/JSP: 2000+行"}]}},methods:{scrollToNext:function(){document.getElementById("next").scrollIntoView({block:"start",behavior:"smooth"})},changeHeight:function(){this.screenWidth>1184?this.infoItemHeight="90%":this.infoItemHeight="45%"}},created:function(){this.changeHeight()},mounted:function(){var t=this;window.onresize=function(){return 
window.screenHeight=document.documentElement.clientHeight,window.screenWidth=document.documentElement.clientWidth,t.screenHeight=window.screenHeight,void(t.screenWidth=window.screenWidth)}},watch:{screenHeight:function(t){if(!this.timer){this.screenHeight=t,this.timer=!0;var e=this;setTimeout(function(){e.timer=!1},400)}},screenWidth:function(t){this.screenWidth=t,this.changeHeight()}}},f={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",{staticClass:"hidden-xs-only info-PC"},[i("div",{staticClass:"picture",style:{height:t.screenHeight+"px"},attrs:{id:"picture"}},[i("div",{staticClass:"title"},[t._v("\n 期望岗位\n "),i("span",{staticClass:"job"},[t._v("前端开发 / 技术支持")]),t._v(" "),i("span",{staticClass:"desc"},[t._v("喜欢技术 热爱挑战")]),t._v(" "),i("div",{staticClass:"scroll-button",on:{click:t.scrollToNext}},[t._m(0)])])]),t._v(" "),i("div",{staticClass:"content",style:{height:t.screenHeight+"px"},attrs:{id:"next"}},[i("div",{staticClass:"wrapper"},[i("el-row",{staticClass:"my-info"},[i("el-col",{staticStyle:{height:"4%"},attrs:{span:24}}),t._v(" "),i("el-col",{staticClass:"info-title",style:{height:t.infoTitleHeight},attrs:{span:24}},[i("span",[t._v("基本资料")])]),t._v(" "),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")]),t._v(" "),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{sm:9,md:9,lg:5,xl:5}},[i("div",{staticClass:"info-item"},[i("span",{class:["iconfont",t.myInfoList[0].icon,"info-icon"]}),t._v(" "),i("span",{staticClass:"info-item-title"},[t._v(t._s(t.myInfoList[0].title))]),t._v(" "),i("span",{staticClass:"info-desc",domProps:{innerHTML:t._s(t.myInfoList[0].desc)}},[t._v(t._s(t.myInfoList[0].desc))])])]),t._v(" "),i("el-col",{staticClass:"my-info-item hidden-lg-and-up",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")]),t._v(" "),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{sm:9,md:9,lg:5,xl:5}},[i("div",{staticClass:"info-item"},[i("span",{class:["iconfont",t.myInfoList[1].icon,"info-icon"]}),t._v(" "),i("span",{staticClass:"info-item-title"},[t._v(t._s(t.myInfoList[1].title))]),t._v(" "),i("span",{staticClass:"info-desc",domProps:{innerHTML:t._s(t.myInfoList[1].desc)}},[t._v(t._s(t.myInfoList[1].desc))])])]),t._v(" "),i("el-col",{staticClass:"my-info-item hidden-lg-and-up",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")]),t._v(" "),i("el-col",{staticClass:"my-info-item hidden-lg-and-up",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")]),t._v(" "),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{sm:9,md:9,lg:5,xl:5}},[i("div",{staticClass:"info-item"},[i("span",{class:["iconfont",t.myInfoList[2].icon,"info-icon"]}),t._v(" "),i("span",{staticClass:"info-item-title"},[t._v(t._s(t.myInfoList[2].title))]),t._v(" "),i("span",{staticClass:"info-desc",domProps:{innerHTML:t._s(t.myInfoList[2].desc)}},[t._v(t._s(t.myInfoList[2].desc))])])]),t._v(" "),i("el-col",{staticClass:"my-info-item hidden-lg-and-up",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")]),t._v(" "),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{sm:9,md:9,lg:5,xl:5}},[i("div",{staticClass:"info-item"},[i("span",{class:["iconfont",t.myInfoList[3].icon,"info-icon"]}),t._v(" "),i("span",{staticClass:"info-item-title"},[t._v(t._s(t.myInfoList[3].title))]),t._v(" "),i("span",{staticClass:"info-desc",domProps:{innerHTML:t._s(t.myInfoList[3].desc)}},[t._v(t._s(t.myInfoList[3].desc))])])]),t._v(" 
"),i("el-col",{staticClass:"my-info-item",style:{height:t.infoItemHeight},attrs:{span:2}},[t._v(" ")])],1)],1)])])},staticRenderFns:[function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"wrapper"},[e("span",{staticClass:"iconfont scroll-icon"},[this._v("")])])}]};var _={name:"Index",data:function(){return{}},components:{IndexHeaderMobile:i("VU/8")(o,h,!1,function(t){i("dHk0")},"data-v-6f243c18",null).exports,IndexHeaderPc:i("VU/8")(r,d,!1,function(t){i("43Mc")},"data-v-2e2c41ad",null).exports,IndexInfoMobile:i("VU/8")(v,m,!1,function(t){i("puza")},"data-v-0d74634a",null).exports,IndexFooter:i("VU/8")({name:"IndexFooter"},u,!1,function(t){i("RjYv")},"data-v-44e558c0",null).exports,IndexInfoPc:i("VU/8")(p,f,!1,function(t){i("811s")},"data-v-4980768f",null).exports}},y={render:function(){var t=this.$createElement,e=this._self._c||t;return e("div",[e("el-container",[e("el-header",[e("index-header-mobile"),this._v(" "),e("index-header-pc")],1),this._v(" "),e("el-main",[e("index-info-mobile"),this._v(" "),e("index-info-pc")],1),this._v(" "),e("el-footer",[e("index-footer")],1)],1)],1)},staticRenderFns:[]};var g=i("VU/8")(_,y,!1,function(t){i("UrZj")},"data-v-46f734ab",null).exports;a.default.use(c.a);var w=new c.a({routes:[{path:"/",name:"Index",component:g}]}),b=i("v5o6"),M=i.n(b),C=i("F3EI"),H=i.n(C),z=i("NYxO"),x="深圳";try{localStorage.city&&(x=localStorage.city)}catch(t){}var L={city:x},k={changeCity:function(t,e){t.city=e;try{localStorage.city=e}catch(t){}}};a.default.use(z.a);var S=new z.a.Store({state:L,mutations:k}),V=(i("v2ns"),i("zL8q")),I=i.n(V),E=(i("tvR6"),i("sfy8"),i("l0WT"),i("36b/"),i("SJZ9"),i("y9JG"),i("woVz"),i("oPmM")),B=i.n(E);a.default.config.productionTip=!1,M.a.attach(document.body),a.default.use(H.a),a.default.use(I.a),a.default.use(B.a),new a.default({el:"#app",router:w,store:S,components:{App:n},template:"<App/>"})},RjYv:function(t,e){},SJZ9:function(t,e){},UrZj:function(t,e){},dHk0:function(t,e){},l0WT:function(t,e){},oPmM:function(t,e){},pYmz:function(t,e,i){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var a="undefined"!=typeof window;a&&(window.Swiper=i("gsqX"));var s={name:"swiper",props:{options:{type:Object,default:function(){return{autoplay:3500}}},notNextTick:{type:Boolean,default:function(){return!1}}},data:function(){return{defaultSwiperClasses:{wrapperClass:"swiper-wrapper"}}},ready:function(){!this.swiper&&a&&(this.swiper=new Swiper(this.$el,this.options))},mounted:function(){var t=this,e=function(){if(!t.swiper&&a){delete t.options.notNextTick;var e=!1;for(var i in t.defaultSwiperClasses)t.defaultSwiperClasses.hasOwnProperty(i)&&t.options[i]&&(e=!0,t.defaultSwiperClasses[i]=t.options[i]);var s=function(){t.swiper=new Swiper(t.$el,t.options)};e?t.$nextTick(s):s()}}(this.options.notNextTick||this.notNextTick)?e():this.$nextTick(e)},updated:function(){this.swiper&&this.swiper.update()},beforeDestroy:function(){this.swiper&&(this.swiper.destroy(),delete this.swiper)}},n={render:function(){var t=this,e=t.$createElement,i=t._self._c||e;return i("div",{staticClass:"swiper-container"},[t._t("parallax-bg"),t._v(" "),i("div",{class:t.defaultSwiperClasses.wrapperClass},[t._t("default")],2),t._v(" "),t._t("pagination"),t._v(" "),t._t("button-prev"),t._v(" "),t._t("button-next"),t._v(" 
"),t._t("scrollbar")],2)},staticRenderFns:[]},c=i("VU/8")(s,n,!1,null,null,null);e.default=c.exports},puza:function(t,e){},qfNC:function(t,e){},sfy8:function(t,e){},tvR6:function(t,e){},v2ns:function(t,e){},woVz:function(t,e){},y9JG:function(t,e){!function(t){var e,i='<svg><symbol id="icongit" viewBox="0 0 1025 1024"><path d="M1004.692673 466.396616l-447.094409-447.073929c-25.743103-25.763582-67.501405-25.763582-93.264987 0l-103.873521 103.873521 78.171378 78.171378c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928 0 14.827372-3.358686 28.856031-9.359266 41.389666l127.97824 127.97824c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928s-42.98709 96.00928-96.00928 96.00928-96.00928-42.98709-96.00928-96.00928c0-14.827372 3.358686-28.856031 9.359266-41.389666l-127.97824-127.97824c-3.051489 1.454065-6.184898 2.744293-9.379746 3.870681l0 266.97461c37.273227 13.188988 63.99936 48.721433 63.99936 90.520695 0 53.02219-42.98709 96.00928-96.00928 96.00928s-96.00928-42.98709-96.00928-96.00928c0-41.799262 26.726133-77.331707 63.99936-90.520695l0-266.97461c-37.273227-13.188988-63.99936-48.721433-63.99936-90.520695 0-14.827372 3.358686-28.856031 9.359266-41.389666l-78.171378-78.171378-295.892081 295.871601c-25.743103 25.784062-25.743103 67.542365 0 93.285467l447.114889 447.073929c25.743103 25.743103 67.480925 25.743103 93.264987 0l445.00547-445.00547c25.763582-25.763582 25.763582-67.542365 0-93.285467z" ></path></symbol><symbol id="iconblogger" viewBox="0 0 1024 1024"><path d="M746.500337 444.954001l-39.047318 0c-7.852852 0-14.527878-3.227508-19.806091-9.903557-5.41022-6.588045-8.15882-14.44192-8.15882-23.559579 0-46.332235-16.316617-85.859483-48.950875-118.449739-32.722262-32.67826-72.249511-49.082882-118.581746-49.082882l-100.519368 0c-46.332235 0-85.773526 16.404622-118.449739 49.082882-32.67826 32.590256-49.082882 72.117504-49.082882 118.449739l0 201.01827c0 46.332235 16.404622 85.881996 49.082882 118.559233 32.676214 32.69975 72.117504 48.994877 118.449739 48.994877l201.125717 0c46.334281 0 85.773526-16.295128 118.451786-48.994877 32.67826-32.67826 49.080835-72.226998 49.080835-118.559233L780.094456 478.547097c0-9.140171-3.315512-16.991999-10.033517-23.668049C763.514849 448.313515 755.661997 444.954001 746.500337 444.954001zM388.575458 391.18233c7.066953-8.748245 15.399735-13.154601 25.12933-13.154601l107.281375 0c9.643638 0 18.063401 4.40738 25.131377 13.154601 7.02295 8.834202 10.600429 19.327184 10.600429 31.498388 0 12.08627-3.577478 22.578228-10.600429 31.41243-7.066953 8.834202-15.487739 13.240559-25.131377 13.240559L413.703765 467.333708c-9.729595 0-18.061354-4.40738-25.12933-13.240559-7.068999-8.834202-10.602476-19.327184-10.602476-31.41243C377.971959 410.509515 381.505435 400.016533 388.575458 391.18233zM636.121414 632.81767c-6.719028 8.855692-14.527878 13.263072-23.559579 13.263072L411.435095 646.080741c-9.031701 0-16.97051-4.40738-23.559579-13.263072-6.544043-8.81169-9.903557-19.327184-9.903557-31.498388 0-12.063757 3.359514-22.578228 9.903557-31.41243 6.589069-8.813736 14.527878-13.240559 23.559579-13.240559l201.125717 0c9.031701 0 16.84055 4.426823 23.559579 13.240559 6.544043 8.834202 9.903557 19.348674 9.903557 31.41243C646.024971 613.490485 642.664434 624.004957 636.121414 632.81767zM780.093433 65.213257 243.903497 65.213257c-98.642624 0-178.61298 80.101339-178.61298 178.744987l0 536.105001c0 98.730629 79.969333 178.722474 178.61298 178.722474l536.190959 0c98.642624 0 
178.615027-79.991846 178.615027-178.722474L958.709483 243.958244C958.70846 145.31562 878.73708 65.213257 780.093433 65.213257zM869.401458 735.387743c0 74.015737-59.989279 133.983527-134.069485 133.983527L288.667004 869.37127c-74.082252 0-134.115534-59.96779-134.115534-133.983527l0-446.775486c0-74.015737 60.033281-133.961014 134.115534-133.961014l446.664969 0c74.080206 0 134.069485 59.945277 134.069485 133.961014L869.401458 735.387743z" ></path></symbol><symbol id="iconai-code" viewBox="0 0 1025 1024"><path d="M293.0688 755.2c-12.0832 0-24.2688-4.2496-33.9968-12.9024L0 512l273.4592-243.0976C294.5536 250.2144 326.912 252.0064 345.7024 273.152c18.7904 21.1456 16.896 53.504-4.2496 72.2944L154.112 512l172.9536 153.7024c21.1456 18.7904 23.04 51.1488 4.2496 72.2944C321.2288 749.4144 307.1488 755.2 293.0688 755.2zM751.0528 755.0976 1024.512 512l-259.072-230.2976c-21.1456-18.7904-53.504-16.896-72.2432 4.2496-18.7904 21.1456-16.896 53.504 4.2496 72.2944L870.4 512l-187.3408 166.5024c-21.1456 18.7904-23.04 51.1488-4.2496 72.2944C688.896 762.2144 702.976 768 717.056 768 729.1392 768 741.3248 763.7504 751.0528 755.0976zM511.5392 827.648l102.4-614.4c4.6592-27.904-14.1824-54.272-42.0864-58.9312-28.0064-4.7104-54.3232 14.1824-58.88 42.0864l-102.4 614.4c-4.6592 27.904 14.1824 54.272 42.0864 58.9312C455.5264 870.1952 458.2912 870.4 461.1072 870.4 485.6832 870.4 507.392 852.6336 511.5392 827.648z" ></path></symbol><symbol id="iconAdobe-Dreamweaver" viewBox="0 0 1024 1024"><path d="M64 72.8v873.8h896.2V72.8H64z m37.3 37.3h821.5v799.1H101.3V110.1z m79.4 168.5c0-3.1 0.6-4.4 4.3-5 14.2-0.6 54.9-1.9 97-1.9 125.1 0 187.3 80.7 187.3 199 0 164.5-107.9 214.3-191 214.3-31.4 0-77.1-0.6-93.7-1.2-2.4 0-3.7-1.9-3.7-4.9V278.5l-0.2 0.1z m67.2 346.1c10.4 0.7 19.7 1.3 31.4 1.3 69 0 120.1-48.7 120.8-153.4 0.8-102.3-52.3-141.7-118.1-141.7-13 0-23.4 0.8-33.9 1.4v292.6l-0.2-0.2z m473.8 58.6c-4.3 0-5.5-1.7-6.2-5.3-18.5-102.9-33.9-175.6-40.7-226.1h-0.6c-8 45.8-22.8 121.6-47.4 227-1.2 3.7-2.5 4.7-5.5 4.7h-60.4c-4.3 0-5.5-0.6-6.7-4.3l-66-294.3c-0.6-2.5 0.7-5 4.4-5H553c2.5 0 4.3 0.9 4.9 3.4 23.4 132.4 33.2 201 37 228.2h2.5c4.3-32 17.3-98.8 44.9-227 1.2-3.7 1.2-4.6 4.9-4.6h63.5c3.1 0 3.7 1.5 4.3 4.6 21.6 121.3 35.7 197.9 39.4 228.7h1.9c6-39.4 14.2-92.6 40.5-229.4 0.6-2.5 1.2-3.9 4.3-3.9h56.8c2.5 0 4.3 1.2 3.1 4.3L787.7 679c-0.6 3.1-1.1 4.3-5.5 4.3h-60.5z" fill="#35FA00" ></path></symbol><symbol id="iconAdobe-Photoshop" viewBox="0 0 1024 1024"><path d="M64 72.6v873.8h896.2V72.6H64z m37.3 37.3h821.5V909H101.3V109.9z m179.3 167.3c0-2.5 5.2-4.3 8.4-4.3 24.1-1.2 59.3-1.9 96.3-1.9C488.7 271 529 327.8 529 400.4c0 94.9-68.8 135.5-153.2 135.5-14.2 0-19-0.6-28.9-0.6v143.5c0 3.1-1.2 4.3-4.3 4.3h-57.7c-3.1 0-4.3-1.1-4.3-4.2V277.2z m66.3 198.4c8.6 0.6 15.4 0.6 30.2 0.6 43.7 0 84.8-15.4 84.8-74.5 0-47.4-29.3-71.5-79.2-71.5-14.8 0-28.9 0.6-35.7 1.2v144.3l-0.1-0.1z m321.4-44.4c-29.6 0-39.4 14.8-39.4 27.1 0 13.6 6.7 22.8 46.2 43.1 58.6 28.4 77 55.5 77 95.5 0 59.7-45.6 91.9-107.2 91.9-32.7 0-60.5-6.8-76.5-16-2.4-1.2-3-3.1-3-6.2v-54.8c0-3.7 1.8-5 4.3-3.1 23.3 15.4 50.5 22.2 75.1 22.2 29.6 0 41.9-12.3 41.9-29 0-13.6-8.6-25.3-46.2-45-53-25.4-75.2-51.2-75.2-94.4 0-48.1 37.6-88.1 102.9-88.1 32.1 0 54.7 4.9 67 10.4 3.1 1.9 3.7 4.9 3.7 7.4v51.2c0 3.1-1.9 5-5.6 3.7-16.6-9.9-41.1-16-65.1-16l0.1 0.1z" fill="#5dc6f4" ></path></symbol><symbol id="iconBootstrap" viewBox="0 0 1024 1024"><path d="M960 810.6c0 82.1-67.2 149.3-149.3 149.3H213.4C131.2 960 64 892.8 64 810.6V213.4C64 131.2 131.2 64 213.4 64h597.3C892.8 64 960 131.2 960 213.4v597.2z" fill="#563D7C" 
></path><path d="M296 251.2h253.8c46.7 0 84.9 10.3 112.9 32.7 28 22.4 42 54.1 42 97.1 0 26.1-6.5 48.5-19.6 67.2-13.1 18.7-31.7 33.6-55.1 43.9v1.9c32.7 6.5 56.9 22.4 72.8 45.7 16.8 23.3 25.2 53.2 25.2 88.7 0 20.5-3.7 40.1-11.2 56.9-7.5 17.7-18.7 33.6-34.5 45.7-15.9 13.1-35.5 23.3-59.7 30.8-24.3 7.5-53.2 11.2-86.8 11.2H296V251.2z m91.4 218.3h149.3c22.4 0 40.1-6.5 55.1-18.7 14.9-12.1 22.4-30.8 22.4-54.1 0-26.1-6.5-44.8-19.6-56-13.1-10.3-32.7-15.9-56.9-15.9H388.3l-0.9 144.7z m0 229.6h161.5c28 0 49.5-7.5 64.4-21.5s23.3-34.5 23.3-61.6c0-26.1-7.5-45.7-23.3-59.7-15.9-14-37.3-20.5-64.4-20.5H387.4v163.3z" fill="#FFFFFF" ></path></symbol><symbol id="iconGitHub" viewBox="0 0 1024 1024"><path d="M64.6 512c0 195.6 125.4 361.9 300.1 422.9 23.5 5.9 19.9-10.8 19.9-22.2v-77.6c-135.8 15.9-141.3-74-150.5-89-18.5-31.5-61.9-39.5-49-54.5 31-15.9 62.5 4 98.9 58 26.4 39.1 77.9 32.5 104.1 26 5.7-23.5 17.9-44.5 34.7-60.9-140.7-25.2-199.4-111.1-199.4-213.3 0-49.5 16.4-95.1 48.4-131.8-20.4-60.6 1.9-112.4 4.9-120.1 58.2-5.2 118.5 41.6 123.3 45.3 33.1-8.9 70.8-13.7 112.9-13.7 42.4 0 80.3 4.9 113.5 13.9 11.3-8.6 67.3-48.8 121.4-43.9 2.9 7.7 24.7 58.3 5.5 118.1 32.5 36.8 49 82.8 49 132.4 0 102.3-59 188.3-200.2 213.2 23.5 23.3 38.1 55.5 38.1 91.1v112.7c0.8 9 0 17.9 15.1 17.9C832.7 877 960.4 709.4 960.4 512.1c0-247.5-200.6-447.9-447.9-447.9C265 64.1 64.6 264.5 64.6 512z" fill="" ></path></symbol><symbol id="iconMicrosoft-Excel" viewBox="0 0 1024 1024"><path d="M943.5 179.7H643.2v55.6h88.5v87.9h-88.5v28h88.5v88h-88.5V468h88.5v83.2h-88.5v33.4h88.5V668h-88.5v33.4h88.5v83.9h-88.5v61.2h300.3c4.7-1.4 8.7-7 11.9-16.7 3.2-9.8 4.8-17.7 4.8-23.8V189.8c0-4.8-1.6-7.7-4.8-8.7-3.9-1-7.9-1.5-11.9-1.4z m-39 605.5h-144v-83.8h144.1l-0.1 83.8z m0-117.2h-144v-83.5h144.1l-0.1 83.5z m0-116.8h-144v-82.9h144.1l-0.1 82.9z m0-112h-144v-87.9h144.1l-0.1 87.9z m0-116.5h-144v-87.4h144.1v88l-0.1-0.6zM63.8 165.8v694.7L592.7 952V72L63.8 166.1v-0.3z m313.5 525.5c-2-5.5-11.5-28.6-28.3-69.6-9.7-23.9-19.7-47.8-29.8-71.6h-0.9l-56.7 135-75.8-5.1 89.8-168-82.4-168 77.3-4.1 51.1 131.5h1l57.7-137.5 79.9-5-95.1 181.9 98 185.5-85.8-5z" fill="#107B0F" ></path></symbol><symbol id="iconMicrosoft-Powerpoint" viewBox="0 0 1024 1024"><path d="M940.9 213.3h-319v119c19.2-14.6 42.3-22 69.3-22V426H806c-0.6 32.4-11.8 59.8-33.6 81.9s-49.2 33.5-81.7 34.2c-24.6-0.5-48.4-8.5-68.3-23v79.5h250.9v39h-251v48.3H873v38.5H622.1v87.4h319c12.9 0 19-6.8 19-20.1V232.3c0-12.8-6.1-19-19-19h-0.2zM709.2 406.9V290.3c32.5 0.7 59.7 12 81.7 34 21.9 22 33.3 49.5 33.8 82.7l-115.5-0.1z m-336.9 2c-1.5-6.7-4.9-12.8-9.8-17.6-5.1-4.5-11.4-7.7-18-9.2-8.4-2.3-17.1-3.4-25.8-3.4l-26.9 0.5v112h1c9.7 0.6 20 0.6 30.8 0 10.6-0.5 20.9-3.4 30.2-8.4 11.7-8.4 18.7-19.6 20.9-34.1 2.2-14.6 1.5-28.6-2.4-41.5v1.7zM64 178.1v670.1l511.1 88.3V87.4L64 178.1z m381.3 302c-14 32.4-34.9 53.2-62.9 62.2-29.3 9.1-60.1 12.6-90.7 10.4v127.8l-67.2-7.8V321.6l106.8-5.6c19.7-1.3 39.5 1 58.5 6.7 19.2 5.7 34.4 17.1 45.7 34.4 11.2 17.2 17.5 37.2 19 59.9 1.6 21.5-1.6 43-9.2 63.1z" fill="#D24825" ></path></symbol><symbol id="iconMicrosoft-Word" viewBox="0 0 1024 1024"><path d="M960.2 203.3v617.9c0 5.3-1.9 9.5-5.6 12.9-3.6 3.4-8.5 5.3-13.4 5.2H621.6v-85.7h260.7v-39h-261v-47.8H882v-39H621.5v-48.2h260.6v-38.5H621.5v-48.3h260.6v-39H621.5V406h260.6v-38.9H621.5v-48.7h260.6v-37.1H621.5v-96.8h319.7c5.6 0 10.1 1.8 13.4 5.6 3.9 3.6 5.6 8.1 5.6 13.2zM573.7 87.8v848.5L64 848.1V178.4l509.7-90.7v0.1z m-76.9 250.5l-63.8 3.9-40.9 253.4h-0.9c-2-12-9.5-54-23-125.7L344.3 348l-59.9 3-24 118.9c-8.2 40.2-16.2 80.5-23.9 120.8h-0.5l-36.4-233.4-54.9 2.9 58.8 294.4 61 
3.9 23-114.6c13.4-67.2 21.3-106.3 23-117h1.7c2.3 11.4 9.6 51.3 23 119.9l23 117.9 66.1 3.9 73.9-330.5-1.4 0.2z" fill="#2B579B" ></path></symbol><symbol id="iconJavaScript" viewBox="0 0 1024 1024"><path d="M63.5 63.5h897v897h-897v-897zM887 746.6c-6.5-40.9-33.2-75.3-112.2-107.4-27.5-12.9-58.1-21.9-67.2-42.6-3.4-12.3-3.9-19.1-1.7-26.4 5.6-24.1 34.2-31.4 56.6-24.7 14.6 4.5 28 15.7 36.5 33.6 38.6-25.3 38.6-25.3 65.6-42-10.1-15.7-15.1-22.5-21.9-29.2-23.5-26.3-54.9-39.8-105.9-38.6l-26.4 3.3c-25.3 6.2-49.3 19.6-63.9 37.6-42.6 48.3-30.3 132.3 21.3 167.1 51 38.1 125.6 46.5 135.2 82.4 9 43.7-32.5 57.7-73.5 52.7-30.3-6.7-47.1-21.9-65.6-49.9l-68.4 39.3c7.8 17.9 16.8 25.8 30.3 41.4 65 65.6 227.6 62.3 256.8-37.5 1.1-3.4 9-26.4 2.8-61.7l1.6 2.6zM551.3 475.8h-84c0 72.4-0.3 144.4-0.3 217 0 46 2.4 88.3-5.2 101.3-12.3 25.8-44.1 22.5-58.5 17.9-14.8-7.3-22.3-17.4-31-32-2.4-3.9-4.1-7.3-4.7-7.3l-68.2 42c11.4 23.5 28 43.8 49.5 56.7 32 19.1 74.9 25.2 119.9 15.1 29.3-8.4 54.5-25.8 67.7-52.7 19.1-34.8 15-77.4 14.8-125.1 0.4-76.8 0-153.6 0-230.9v-2z" fill="#F5DD1E" ></path></symbol><symbol id="iconCSS" viewBox="0 0 1024 1024"><path d="M512 512zM128.3 64.3l69.8 805.8 313.4 89.5L825.8 870l69.9-805.7H128.3z m580.9 669.3l-197.1 56.2-196.8-56.5L301.9 578h96.4l6.9 79.1 107.1 30.3 0.3 0.5h0.1l106.9-29.7L630.7 530H406.1l-8-99.9h241.1l8.8-101.9H280.2l-8-97.9H753l-43.8 503.3z" fill="#264DE4" ></path></symbol><symbol id="iconHTML" viewBox="0 0 1024 1024"><path d="M116.7 63.8l71.9 806.9 322.8 89.6 323.7-89.8 72.1-806.7H116.7z m634 263.9H372l9 101.3h360.7l-27.2 303.8-203 56.3-202.7-56.3-13.9-155.4h99.4l7.1 79 110.2 29.7 0.3-0.1L622 656.3 633.5 528h-343l-26.7-299.2h495.7l-8.8 98.9z" fill="#E44D26" ></path></symbol><symbol id="iconwebstorm" viewBox="0 0 1197 1024"><path d="M709.30958866 935.1232877c10.87397227-13.66438359 23.73698672-28.57808232 38.96301445-46.14246563l7.58219151-8.74931485c9.48493125-10.94794541 17.36712334-19.4054792 24.93698643-27.60821982 15.05753437-16.2410959 28.08493154-30.25479463 51.87945146-62.6465751 23.34657568-31.80410948 38.4287669-55.73835615 35.96712363-96.77260283-1.19589082-21.11917852-2.03835586-35.61369873-15.81369873-52.62739717-8.27671201-10.19999971-23.64246562-25.03150664-39.93698584-40.72191767-17.63013692-17.00958867-32.97123281-33.19315049-53.75342461-54.85068546-20.79862998-21.57123252-47.68767158-76.74657539-47.68767158-76.7465745-8.00547979-18.26301357-22.9643833-56.18630127-27.13150722-91.44657598L623.2178079 63.47945234H449.11095868L363.9356161 458.63287666 288.51232899 63.47945234H82.99999999l185.40410976 897.73150693h173.10821924l86.53972588-388.59863026 76.53698613 388.59863026h80.36301357l24.35753409-26.08767158" fill="#2788B5" ></path><path d="M1071.63561668 117.20410947c18.18082178-21.07808262 33.16438388-38.16164355 45.46438331-53.66712304h-276.12328799c-16.43835586 20.97945175-49.78767129 61.42191768-68.83972559 97.29041045-25.1589041 47.49041075-45.85890411 83.11643877-47.91369902 170.54383622l0.92876748 28.15890381c0 41.67945175 29.80273974 102.87534287 29.80273975 102.87534288 4.66027383 10.84931543 20.43287666 33.81369873 30.52191797 47.78630068h-0.25479493a743.25205459 743.25205459 0 0 0 5.51917793 7.26164385c29.75753408 34.09726026 74.78630156 72.20136972 94.67260312 96.66986308 23.24383594 28.68082207 23.8643833 55.61095869 25.11780821 76.54931543 3.36164414 56.59726026-19.53287666 90.70684893-44.01780821 124.12602774-37.03561612 50.50273974-50.37534229 59.86027354-78.8835621 92.87260224-18.18082178 21.04931513-33.15205459 38.10410948-45.4643833 
53.53972647h285.3c16.73013692-21.47260253 40.55342432-61.12602773 59.67534287-97.22054795 25.1589041-47.49041075 45.88767158-83.12054766 47.91369815-170.46164443l-0.92876661-28.18356153c0-41.74931513-29.80273974-102.96164356-29.80273974-102.96164355-4.65616406-10.84931543-20.44520508-33.67397286-30.55068545-47.78630156h0.25479492c-3.23835644-4.33561641-5.49041133-7.23287637-5.49041133-7.23287638-29.75753408-34.12191797-74.81506817-72.17260312-94.67260225-96.66986308-23.24383594-28.75068457-23.8643833-55.63972617-25.1178082-76.56575361-3.36164414-56.53972617 19.53287666-90.70274003 44.04657481-124.12191797 37.01917793-50.37534229 50.34657568-59.83150693 78.8424662-92.80273975" fill="#FA930D" ></path></symbol><symbol id="icongithub" viewBox="0 0 1024 1024"><path d="M347.8 794.8c0 4-4.6 7.2-10.4 7.2-6.6 0.6-11.2-2.6-11.2-7.2 0-4 4.6-7.2 10.4-7.2 6-0.6 11.2 2.6 11.2 7.2z m-62.2-9c-1.4 4 2.6 8.6 8.6 9.8 5.2 2 11.2 0 12.4-4s-2.6-8.6-8.6-10.4c-5.2-1.4-11 0.6-12.4 4.6z m88.4-3.4c-5.8 1.4-9.8 5.2-9.2 9.8 0.6 4 5.8 6.6 11.8 5.2 5.8-1.4 9.8-5.2 9.2-9.2-0.6-3.8-6-6.4-11.8-5.8zM505.6 16C228.2 16 16 226.6 16 504c0 221.8 139.6 411.6 339 478.4 25.6 4.6 34.6-11.2 34.6-24.2 0-12.4-0.6-80.8-0.6-122.8 0 0-140 30-169.4-59.6 0 0-22.8-58.2-55.6-73.2 0 0-45.8-31.4 3.2-30.8 0 0 49.8 4 77.2 51.6 43.8 77.2 117.2 55 145.8 41.8 4.6-32 17.6-54.2 32-67.4-111.8-12.4-224.6-28.6-224.6-221 0-55 15.2-82.6 47.2-117.8-5.2-13-22.2-66.6 5.2-135.8 41.8-13 138 54 138 54 40-11.2 83-17 125.6-17s85.6 5.8 125.6 17c0 0 96.2-67.2 138-54 27.4 69.4 10.4 122.8 5.2 135.8 32 35.4 51.6 63 51.6 117.8 0 193-117.8 208.4-229.6 221 18.4 15.8 34 45.8 34 92.8 0 67.4-0.6 150.8-0.6 167.2 0 13 9.2 28.8 34.6 24.2C872.4 915.6 1008 725.8 1008 504 1008 226.6 783 16 505.6 16zM210.4 705.8c-2.6 2-2 6.6 1.4 10.4 3.2 3.2 7.8 4.6 10.4 2 2.6-2 2-6.6-1.4-10.4-3.2-3.2-7.8-4.6-10.4-2z m-21.6-16.2c-1.4 2.6 0.6 5.8 4.6 7.8 3.2 2 7.2 1.4 8.6-1.4 1.4-2.6-0.6-5.8-4.6-7.8-4-1.2-7.2-0.6-8.6 1.4z m64.8 71.2c-3.2 2.6-2 8.6 2.6 12.4 4.6 4.6 10.4 5.2 13 2 2.6-2.6 1.4-8.6-2.6-12.4-4.4-4.6-10.4-5.2-13-2z m-22.8-29.4c-3.2 2-3.2 7.2 0 11.8 3.2 4.6 8.6 6.6 11.2 4.6 3.2-2.6 3.2-7.8 0-12.4-2.8-4.6-8-6.6-11.2-4z" fill="" ></path></symbol><symbol id="iconshousuoxiajiantou" viewBox="0 0 1024 1024"><path d="M145.4 461.1l367.6 340 367.6-340c22.5-14.5 52.5-14.5 67.5 0 15 21.7 15 50.7 0 65.1L527.9 909.5c0 7.2-7.5 7.2-15 7.2s-7.5 0-15-7.2L77.7 526.2c-22.5-21.7-15-50.6 0-65.1 22.6-14.5 52.6-14.5 67.7 0zM528 576.9c0 7.2-7.5 7.2-15 7.2s-15 0-15-7.2L77.8 193.6c-22.5-21.7-15-50.6 0-65.1 22.5-14.4 52.5-14.4 67.5 0L513 468.4l367.6-347.2c22.5-14.5 52.5-14.5 67.5 0 15 21.7 15 50.7 0 65.1L528 576.9z m0 0" ></path></symbol><symbol id="iconVue" viewBox="0 0 1024 1024"><path d="M615.6 123.6h165.5L512 589.7 242.9 123.6H63.5L512 900.4l448.5-776.9z" fill="#41B883" ></path><path d="M781.1 123.6H615.6L512 303 408.4 123.6H242.9L512 589.7z" fill="#34495E" ></path></symbol><symbol id="iconjQuery" viewBox="0 0 1024 1024"><path d="M121.3 283.3c-79.3 113.9-69.5 262-8.8 383 1.4 2.9 2.9 5.7 4.4 8.5 0.9 1.9 1.8 3.7 2.9 5.6 0.5 1 1.1 2.1 1.8 3.1l3 5.7 5.9 9.9c1.1 1.8 2.1 3.6 3.4 5.5 2 3.5 4.5 7 6.6 10.5 1 1.5 1.9 2.9 2.9 4.4 4.1 6.5 8 11.9 12 17.1l-0.4-0.6c2.9 4 5.8 7.9 9 11.8 1 1.4 2.1 2.8 3.2 4.3l8.3 10.1c1 1.1 2 2.5 3.1 3.7 3.7 4.4 7.5 8.7 11.4 13 0 0.1 0.1 0.1 0.2 0.3 4.8 6.1 10 11.5 15.7 16.3l0.2 0.1c3 3.1 6 6.1 9.2 9.1l3.8 3.6c4.1 3.9 8.3 7.8 12.7 11.5 0.1 0 0.1 0.1 0.2 0.1l2.1 1.9c3.8 3.3 7.6 6.6 11.6 9.7l4.7 3.9c3.2 2.5 6.5 5 9.7 7.5l5.1 3.9c3.5 2.6 7.2 5.2 10.7 7.7 1.3 0.9 2.6 1.9 3.9 2.7l1.1 0.9 10.5 6.9 4.5 
3c5.5 3.5 11 6.8 16.4 10.1 1.6 0.8 3.1 1.7 4.6 2.5 4 2.3 8.2 4.7 12.3 6.8 2.2 1.3 4.6 2.4 6.9 3.5 2.8 1.6 5.7 3.1 8.7 4.7 0.7 0.2 1.5 0.5 2.2 0.9h-0.1c1.2 0.6 2.4 1.1 3.6 1.8 4.5 2.2 9.2 4.4 14 6.5 0.9 0.4 1.9 0.8 2.8 1.3 5.4 2.4 10.8 4.6 16.3 6.8 1.3 0.4 2.6 1 3.9 1.5 5 1.9 10.2 3.8 15.3 5.7l1.9 0.7c5.7 1.9 11.3 3.8 17.1 5.6 1.3 0.4 2.7 0.9 4.1 1.2 5.9 1.8 11.7 3.9 17.7 5.1 383.1 69.8 494.5-230.3 494.5-230.3C844.2 824 678.2 856.1 521 820.4c-5.8-1.3-11.7-3.2-17.5-4.9-8.3-2.4-16.6-5-24.7-7.8l3.5 1.1-2.3-0.9c-5.1-1.7-10-3.6-14.9-5.5-1.2-0.5-2.4-1-3.7-1.4l-0.4-0.1c-5.5-2.2-10.8-4.5-16.1-6.8-1.1-0.4-2.1-0.9-3.3-1.3-6.3-2.7-12.5-5.6-18.6-8.6l5.2 2.3c-1.4-0.6-2.6-1.2-3.9-1.9-3.5-1.7-7-3.5-10.4-5.3-2.6-1.2-5.2-2.6-7.7-4l0.8 0.4c-4.2-2.2-8.4-4.7-12.7-7-1.3-0.9-2.7-1.7-4.2-2.5-6.3-3.7-12.6-7.5-18.7-11.5l2.4 1.4c-1.6-1-3.1-2-4.6-3.1l0.2 0.2c-4.2-2.6-8.3-5.3-12.3-8.1l0.7 0.5c-1.3-0.9-2.5-1.8-3.8-2.7-4.8-3.3-8.3-5.9-11.9-8.6l0.9 0.6c-1.6-1.3-3.2-2.5-4.9-3.7-3.3-2.6-6.6-5-9.9-7.8l-4.4-3.5c-4.9-3.9-8.9-7.4-12.9-10.9l0.4 0.4c-0.4-0.4-0.9-0.8-1.3-1.1l-13-11.8-3.7-3.5c-3.1-3.1-6.2-6.1-9.3-9.2l-3.6-3.7c-3.9-3.8-7.6-7.8-11.3-11.8l-0.2-0.3-0.6-0.6c-4-4.4-7.8-8.8-11.7-13.2-1-1.1-1.9-2.4-3-3.6l-8.4-10.3c-3.8-4.8-7.5-9.7-11.2-14.6l-1.5-2.1c-87.2-119.1-118.7-283.3-49-418.1l-62.2 77.8z" fill="#0769AD" ></path><path d="M367.3 187.4c-57.5 82.5-54.2 192.8-9.5 280 8.3 16.4 16.7 30.2 26 43.3l-0.7-1.1c8.6 12.3 18.1 26.9 29.6 36.8 4.1 4.6 8.4 9.1 12.9 13.4l3.4 3.4c4.3 4.1 8.6 8.1 13.1 12.1l0.6 0.5c4.5 3.9 9.5 8.1 14.7 12.2l0.8 0.6c1.3 0.9 2.4 1.9 3.6 2.7 5.2 4 10.5 7.9 15.9 11.8l0.6 0.3c2.3 1.7 4.8 3.2 7.4 4.8 1 0.7 2.2 1.6 3.4 2.2 3.9 2.5 7.8 4.9 11.8 7.4l1.8 0.9c3.4 2 7 4 10.5 5.9 1.2 0.8 2.4 1.3 3.7 2 2.5 1.2 5 2.5 7.4 3.8l1.2 0.5c5 2.5 10.2 4.8 15.2 7.1 5.3 2.3 10.6 4.5 16 6.6 1.9 0.6 3.6 1.4 5.4 1.9 3.8 1.4 7.8 2.7 11.6 4l5.2 1.7c5.5 1.7 11 3.9 16.7 4.8 295.7 49 364.2-178.8 364.2-178.8-61.6 88.7-180.9 131-308.1 98-6.3-1.6-12.6-3.4-18.8-5.5l2.1 0.6c-1.8-0.5-3.4-1-5.1-1.6-3.9-1.3-7.9-2.7-11.7-4.1l-5.4-2c-4.2-1.7-8.4-3.2-12.5-5-1.3-0.6-2.4-0.9-3.4-1.5-5.2-2.4-10.5-4.7-15.6-7.2l-7.7-4-4.5-2.2c-3.4-1.8-6.6-3.7-9.9-5.6-0.8-0.4-1.6-0.8-2.4-1.3l0.1 0.1c-3.9-2.5-8.1-4.8-11.8-7.4-1.3-0.7-2.4-1.6-3.6-2.3l-7.8-5.1c-5.4-3.7-10.6-7.8-15.9-11.7-1.2-1.1-2.4-2-3.5-2.9-55.9-43.9-100-104-120.9-172.1-22-70.7-17.2-150 20.9-214.4l-47 66.4z" fill="#0769AD" ></path><path d="M574.1 115.7C540.2 165.4 537 227.2 560.3 282c24.8 58.3 75.5 104 134.6 125.7 2.4 0.9 4.8 1.7 7.3 2.6l3.3 1c3.4 1.1 6.9 2.4 10.4 3.1 163.4 31.5 207.6-83.9 219.5-100.8-38.9 55.9-104.2 69.3-184.2 49.8-7-1.7-13.8-3.8-20.5-6.4l1.2 0.4c-8.3-2.9-16.5-6.3-24.4-10.1l1.4 0.6c-14.4-7-28.2-15.4-40.9-25.1l0.6 0.4c-71.7-54.3-116.1-158-69.3-242.4l-25.2 34.9z" fill="#0769AD" ></path></symbol><symbol id="iconNodejs" viewBox="0 0 1024 1024"><path d="M506.9 786.4c-3 0-5.9-0.8-8.5-2.3l-27-16c-4-2.3-2.1-3.1-0.7-3.5 5.4-1.9 6.5-2.3 12.2-5.6 0.6-0.3 1.4-0.2 2 0.1l20.8 12.3c0.7 0.4 1.8 0.4 2.5 0l80.9-46.7c0.7-0.4 1.2-1.3 1.2-2.2v-93.4c0-0.9-0.5-1.8-1.3-2.2l-80.9-46.7c-0.7-0.4-1.7-0.4-2.5 0L424.8 627c-0.8 0.4-1.3 1.3-1.3 2.2v93.4c0 0.9 0.5 1.7 1.3 2.2l22.2 12.8c12 6 19.4-1.1 19.4-8.2v-92.2c0-1.3 1-2.3 2.3-2.3h10.2c1.3 0 2.3 1 2.3 2.3v92.2c0 16-8.7 25.3-24 25.3-4.7 0-8.4 0-18.7-5.1l-21.2-12.2c-5.2-3-8.5-8.7-8.5-14.8v-93.4c0-6.1 3.2-11.7 8.5-14.7l81-46.7c5.1-2.9 11.9-2.9 17 0l80.9 46.7c5.2 3 8.5 8.7 8.5 14.7v93.4c0 6.1-3.3 11.7-8.5 14.7L515.3 784c-2.5 1.6-5.5 2.3-8.4 2.4" fill="#689F63" ></path><path d="M531.9 722c-35.4 0-42.8-16.3-42.8-29.9 0-1.3 1-2.3 2.3-2.3h10.5c1.2 0 2.1 0.8 2.3 2 1.6 10.6 
6.3 16 27.7 16 17.1 0 24.3-3.9 24.3-12.9 0-5.2-2.1-9.1-28.6-11.7-22.2-2.2-35.9-7.1-35.9-24.8 0-16.3 13.8-26 36.8-26 25.9 0 38.7 9 40.4 28.3 0.1 0.7-0.2 1.3-0.6 1.8-0.4 0.5-1.1 0.7-1.7 0.7h-10.5c-1.1 0-2-0.8-2.3-1.8-2.5-11.2-8.6-14.8-25.3-14.8-18.6 0-20.8 6.5-20.8 11.3 0 5.9 2.6 7.6 27.7 10.9 24.9 3.3 36.7 8 36.7 25.4 0 17.6-14.7 27.7-40.3 27.7m98.5-98.8h2.7c2.2 0 2.7-1.6 2.7-2.5 0-2.4-1.6-2.4-2.6-2.4h-2.8v4.9z m-3.3-7.6h6c2.1 0 6.1 0 6.1 4.6 0 3.2-2.1 3.9-3.3 4.3 2.4 0.2 2.6 1.7 2.9 4 0.2 1.4 0.4 3.8 0.9 4.6h-3.7c-0.1-0.8-0.7-5.3-0.7-5.5-0.2-1-0.6-1.5-1.8-1.5h-3.1v7H627v-17.5z m-7.2 8.7c0 7.3 5.9 13.1 13 13.1 7.3 0 13.1-6 13.1-13.1 0-7.3-5.9-13-13.1-13-7.1-0.1-13.1 5.6-13 13m28.6 0c0 8.6-7 15.6-15.6 15.6-8.5 0-15.6-6.9-15.6-15.6 0-8.8 7.3-15.6 15.6-15.6 8.4 0 15.6 6.8 15.6 15.6" fill="#689F63" ></path><path d="M256.3 421c0-3.7-2-7.1-5.2-9l-85.8-49.4c-1.4-0.9-3.1-1.3-4.7-1.4h-0.9c-1.6 0.1-3.3 0.5-4.7 1.4L69.2 412c-3.2 1.9-5.2 5.3-5.2 9l0.2 133c0 1.8 1 3.6 2.6 4.5 1.6 1 3.6 1 5.1 0l51-29.2c3.2-1.9 5.2-5.3 5.2-9v-62.1c0-3.7 2-7.1 5.2-9l21.7-12.5c1.6-0.9 3.4-1.4 5.2-1.4 1.8 0 3.6 0.5 5.2 1.4l21.7 12.5c3.2 1.8 5.2 5.3 5.2 9v62.1c0 3.7 2 7.1 5.2 9l51 29.2c1.6 1 3.6 1 5.2 0 1.6-0.9 2.6-2.6 2.6-4.5V421z m404.8 69.2c0 0.9-0.5 1.8-1.3 2.2l-29.5 17c-0.8 0.5-1.8 0.5-2.6 0l-29.5-17c-0.8-0.5-1.3-1.3-1.3-2.2v-34c0-0.9 0.5-1.8 1.3-2.2l29.5-17c0.8-0.5 1.8-0.5 2.6 0l29.5 17c0.8 0.5 1.3 1.3 1.3 2.2v34z m8-251.9c-1.6-0.9-3.6-0.9-5.2 0.1-1.6 0.9-2.6 2.6-2.6 4.5v131.7c0 1.3-0.7 2.5-1.8 3.1-1.1 0.6-2.5 0.6-3.6 0l-21.5-12.4c-3.2-1.9-7.1-1.9-10.4 0l-85.8 49.5c-3.2 1.8-5.2 5.3-5.2 9v99.1c0 3.7 2 7.1 5.2 9l85.8 49.6c3.2 1.8 7.1 1.8 10.4 0l85.8-49.6c3.2-1.9 5.2-5.3 5.2-9V275.8c0-3.8-2-7.2-5.3-9.1l-51-28.4z m285.7 217c3.2-1.9 5.2-5.3 5.2-9v-24c0-3.7-2-7.1-5.2-9l-85.3-49.5a10.2 10.2 0 0 0-10.4 0l-85.8 49.5c-3.2 1.9-5.2 5.3-5.2 9v99c0 3.7 2 7.2 5.2 9l85.3 48.6c3.1 1.8 7 1.8 10.2 0.1l51.6-28.7c1.6-0.9 2.7-2.6 2.7-4.5s-1-3.6-2.6-4.5l-86.3-49.5c-1.6-0.9-2.6-2.6-2.6-4.5v-31.1c0-1.9 1-3.6 2.6-4.5l26.9-15.5c1.6-0.9 3.6-0.9 5.2 0l26.9 15.5c1.6 0.9 2.6 2.6 2.6 4.5v24.4c0 1.8 1 3.6 2.6 4.5 1.6 0.9 3.6 0.9 5.2 0l51.2-29.8z" fill="#231815" ></path><path d="M863.1 450.7c0.6-0.4 1.4-0.4 2 0l16.5 9.5c0.6 0.4 1 1 1 1.7v19c0 0.7-0.4 1.4-1 1.7l-16.5 9.5c-0.6 0.4-1.4 0.4-2 0l-16.5-9.5c-0.6-0.4-1-1-1-1.7v-19c0-0.7 0.4-1.4 1-1.7l16.5-9.5zM487.5 415.6l-0.2-0.2c-0.1-0.1-0.2-0.2-0.4-0.3-0.2-0.2-0.5-0.4-0.7-0.6-0.1-0.1-0.1-0.1-0.2-0.1l-0.9-0.6-55.9-32.3-8.9-5.1-20.6-11.9c-0.9-0.5-1.8-0.8-2.7-1.1-0.8-0.2-1.6-0.3-2.4-0.3H393.6h-0.1c-0.2 0-0.4 0.1-0.6 0.1-0.2 0-0.4 0.1-0.5 0.1-0.2 0.1-0.5 0.1-0.7 0.2-0.1 0-0.3 0.1-0.4 0.1-0.3 0.1-0.5 0.2-0.7 0.3h-0.1c-0.3 0.1-0.6 0.3-0.9 0.5l-65.8 38-19.3 11.1-0.2 0.1c-3.2 1.8-5.2 5.2-5.2 8.9v98.6c0 2.3 0.8 4.5 2.1 6.3 0.2 0.3 0.5 0.6 0.8 0.9l0.2 0.2c0.2 0.2 0.5 0.4 0.7 0.7l0.2 0.2c0.3 0.3 0.7 0.5 1.1 0.7l74.4 43 11 6.3c0.9 0.5 1.8 0.9 2.8 1.1 0.2 0 0.4 0.1 0.5 0.1 0.3 0 0.5 0.1 0.8 0.1 0.3 0 0.5 0 0.8 0.1h0.3c0.4 0 0.7 0 1.1-0.1h0.4c0.3 0 0.6-0.1 0.9-0.2 0.1 0 0.2 0 0.3-0.1h0.2l0.9-0.3c0.1 0 0.1 0 0.2-0.1l1.2-0.6 85.3-49.3c2.6-1.5 4.4-4.2 5-7.1 0.1-0.6 0.2-1.2 0.2-1.8v-98.6c-0.2-2.7-1.2-5.2-3-7z" fill="#689F63" ></path></symbol><symbol id="icongitlab" viewBox="0 0 1024 1024"><path d="M967.428571 559.085714l-124.571428-423.771428c-7.771429-23.314286-26.4-38.971429-51.314286-38.971429s-45.142857 14.057143-52.914286 37.371429l-82.514285 246.171428H368.228571L285.714286 133.828571c-7.771429-23.314286-28-37.371429-52.914286-37.371428s-45.142857 15.542857-51.314286 38.971428L57.028571 559.085714c-4.685714 15.542857 
1.6 32.685714 14.057143 42.057143l440.457143 330.285714 441.942857-330.285714c12.342857-9.257143 18.628571-26.4 13.942857-42.057143z" ></path></symbol><symbol id="iconMySQL" viewBox="0 0 1024 1024"><path d="M922.624 10.752H105.984c-51.712 0-93.184 41.984-93.184 93.184v816.64c0 51.712 41.984 93.184 93.184 93.184h177.664v-64H105.984c-16.384 0-29.696-13.312-29.696-29.696V103.936c0-16.384 13.312-29.696 29.696-29.696h816.64c16.384 0 29.696 13.312 29.696 29.696v583.168c-20.48-18.944-43.008-35.84-67.072-50.176-102.912-65.024-226.816-86.016-350.208-58.88 0 0-25.6 5.632-31.744 22.016-6.144 14.848 5.12 33.792 5.12 33.792 6.656 11.776 17.92 25.6 31.232 41.472 2.56 3.072 5.632 6.656 8.704 10.752-16.896-3.072-39.936-10.24-68.096-23.552-23.552-14.848-31.232-23.04-33.792-27.136 0 0-7.68-10.24-14.848-15.872-10.752-8.704-30.208 0.512-30.208 0.512-62.976 26.112-108.032 52.224-118.784 58.368l-4.096 2.048c-25.088 12.8-39.424 17.92-47.616 19.968 0-24.576 17.92-54.784 32.768-79.872 2.56-4.096 4.608-7.68 6.656-11.776l6.656-11.264-3.072-12.288c-9.728-40.448 13.824-98.816 41.472-130.048 25.088-24.576 120.832-92.672 243.712-98.304l19.968-0.512 14.848-18.944c18.944-24.064 77.312-51.712 119.808-66.56-7.168 11.264-14.336 22.528-19.456 33.28-16.896 35.328-6.656 54.784 5.632 65.024 3.584 3.072 9.216 6.656 16.384 8.192 45.568 16.896 87.04 39.424 124.416 67.584 21.504-28.672 13.824-70.144-17.408-88.064-21.504-12.288-43.52-23.04-66.56-32.256 5.632-10.24 13.824-22.016 18.944-30.208 12.8-19.456 23.552-35.84 27.648-50.176l2.56-8.704-2.56-8.704c-4.096-14.336-16.384-25.088-32.768-28.16-45.568-8.704-177.664 44.032-222.72 95.232C421.888 302.08 312.832 367.616 269.824 410.624l-1.024 1.024c-36.352 40.448-67.584 113.152-59.392 175.104-20.992 35.84-50.688 84.992-39.936 135.68v0.512c4.096 16.896 14.336 29.696 29.184 36.352 31.744 14.848 72.704-3.584 109.056-22.016l4.608-2.56 2.048-1.024c7.168-4.096 41.984-24.576 92.672-46.592 9.728 9.216 22.528 18.944 40.448 30.208l3.072 1.536c64.512 31.232 148.48 52.224 175.104 7.68l1.024-2.048c13.824-28.16-9.216-55.808-38.912-90.624 93.184-11.776 185.344 8.192 262.656 57.344l0.512 0.512c37.376 23.04 70.656 52.224 98.816 87.04l1.536-1.024V921.6c0 16.384-13.312 29.696-29.696 29.696h-188.928v64H921.6c51.712 0 93.184-41.984 93.184-93.184V103.936c1.024-51.2-40.448-93.184-92.16-93.184z" ></path><path d="M392.704 950.272h66.56v64h-66.56zM544.768 950.272h66.56v64h-66.56z" ></path></symbol><symbol id="iconriqi" viewBox="0 0 1024 1024"><path d="M888.021333 128H768v-27.989333a35.968 35.968 0 1 0-72.021333 0V128H328.021333v-27.989333a35.968 35.968 0 1 0-71.978666 0V128H135.978667C96.256 128 64 160.213333 64 200.021333v688c0 39.765333 32.213333 71.978667 72.021333 71.978667h752c39.765333 0 71.978667-32.298667 71.978667-72.021333V200.021333C960 160.256 927.786667 128 887.978667 128zM136.021333 200.021333H256v19.968a35.968 35.968 0 1 0 72.021333 0v-19.968h368v19.968a35.968 35.968 0 1 0 71.978667 0v-19.968h120.021333v112H136.021333V200.021333z m752 688H136.021333V384h752v504.021333z" fill="#595959" ></path><path d="M505.301333 448.981333c-19.882667 0-35.968 14.421333-35.968 32.213334v298.496c0 17.792 16.085333 32.170667 35.968 32.170666 19.882667 0 35.925333-14.378667 35.925334-32.170666v-298.496c0-17.792-16.042667-32.213333-35.925334-32.213334z" fill="#595959" ></path></symbol><symbol id="iconicon_User" viewBox="0 0 1024 1024"><path d="M798.293333 981.333333H225.706667a128 128 0 0 1-103.04-51.413333 124.16 124.16 0 0 1-21.333334-110.293333 428.8 428.8 0 0 1 248.533334-275.2A277.333333 277.333333 0 1 1 512 
597.333333 343.253333 343.253333 0 0 0 184.106667 843.306667 39.253333 39.253333 0 0 0 192 878.506667a42.666667 42.666667 0 0 0 34.773333 17.493333h571.52a42.666667 42.666667 0 0 0 34.773334-17.493333 39.466667 39.466667 0 0 0 6.826666-35.413334 341.333333 341.333333 0 0 0-138.24-188.16 42.666667 42.666667 0 0 1 47.36-70.826666 426.666667 426.666667 0 0 1 172.8 234.666666 124.586667 124.586667 0 0 1-21.333333 110.506667A128 128 0 0 1 798.293333 981.333333zM512 128a192 192 0 0 0-3.84 384H512a192 192 0 0 0 0-384z" ></path></symbol><symbol id="iconicon_Alendar" viewBox="0 0 1024 1024"><path d="M746.666667 746.666667h-42.666667a42.666667 42.666667 0 0 1 0-85.333334h42.666667a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M533.333333 746.666667h-42.666666a42.666667 42.666667 0 0 1 0-85.333334h42.666666a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M320 746.666667h-42.666667a42.666667 42.666667 0 0 1 0-85.333334h42.666667a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M746.666667 576h-42.666667a42.666667 42.666667 0 0 1 0-85.333333h42.666667a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M533.333333 576h-42.666666a42.666667 42.666667 0 0 1 0-85.333333h42.666666a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M874.666667 405.333333H149.333333a42.666667 42.666667 0 0 1 0-85.333333h725.333334a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M661.333333 266.666667a42.666667 42.666667 0 0 1-42.666666-42.666667v-128a42.666667 42.666667 0 0 1 85.333333 0v128a42.666667 42.666667 0 0 1-42.666667 42.666667z" ></path><path d="M362.666667 266.666667a42.666667 42.666667 0 0 1-42.666667-42.666667v-128a42.666667 42.666667 0 0 1 85.333333 0v128a42.666667 42.666667 0 0 1-42.666666 42.666667z" ></path><path d="M704 917.333333H234.666667a128 128 0 0 1-128-128V234.666667a128 128 0 0 1 128-128h554.666666a128 128 0 0 1 128 128v320a42.666667 42.666667 0 0 1-85.333333 0V234.666667a42.666667 42.666667 0 0 0-42.666667-42.666667H234.666667a42.666667 42.666667 0 0 0-42.666667 42.666667v554.666666a42.666667 42.666667 0 0 0 42.666667 42.666667h469.333333a128 128 0 0 0 123.946667-96 42.666667 42.666667 0 1 1 82.773333 21.333333A213.333333 213.333333 0 0 1 704 917.333333z" ></path></symbol><symbol id="iconicon_Call" viewBox="0 0 1024 1024"><path d="M640 384m-42.666667 0a42.666667 42.666667 0 1 0 85.333334 0 42.666667 42.666667 0 1 0-85.333334 0Z" ></path><path d="M789.333333 426.666667a42.666667 42.666667 0 0 1-42.666666-42.666667 106.666667 106.666667 0 0 0-106.666667-106.666667 42.666667 42.666667 0 0 1 0-85.333333 192 192 0 0 1 192 192 42.666667 42.666667 0 0 1-42.666667 42.666667z" ></path><path d="M938.666667 426.666667a42.666667 42.666667 0 0 1-42.666667-42.666667A256 256 0 0 0 640 128a42.666667 42.666667 0 0 1 0-85.333333 341.333333 341.333333 0 0 1 341.333333 341.333333 42.666667 42.666667 0 0 1-42.666666 42.666667z" ></path><path d="M644.266667 981.333333a178.773333 178.773333 0 0 1-46.933334-6.186666l-5.546666-1.706667c-5.76-1.92-11.733333-4.266667-17.493334-6.613333a897.066667 897.066667 0 0 1-313.386666-204.586667A898.56 898.56 0 0 1 56.96 448l-6.186667-15.786667a40.746667 40.746667 0 0 1-2.346666-8.746666A158.506667 158.506667 0 0 1 42.666667 379.093333a170.666667 170.666667 0 0 1 50.56-121.173333l91.946666-91.946667A128 128 0 0 1 277.333333 128a128 128 0 0 1 91.306667 37.973333l61.226667 61.013334a129.92 129.92 0 0 1 0 183.04l-41.6 40.96A781.226667 781.226667 0 0 0 459.733333 533.333333a42.666667 42.666667 0 0 1-60.373333 60.16 922.88 922.88 0 0 
1-79.36-89.6 86.4 86.4 0 0 1 7.466667-113.28l40.96-40.96a44.586667 44.586667 0 0 0 0-62.293333l-61.226667-61.013333A42.666667 42.666667 0 0 0 277.333333 213.333333a44.16 44.16 0 0 0-31.146666 12.8l-92.8 91.946667A85.333333 85.333333 0 0 0 128 379.733333a70.186667 70.186667 0 0 0 3.2 23.466667v2.346667l4.693333 11.733333a816.853333 816.853333 0 0 0 185.386667 284.8 815.573333 815.573333 0 0 0 284.373333 185.173333l12.8 5.12h1.066667a87.253333 87.253333 0 0 0 85.333333-21.333333l93.013334-92.373333a35.84 35.84 0 0 0 5.546666-7.04 44.373333 44.373333 0 0 0-5.546666-55.04l-61.44-61.653334a44.16 44.16 0 0 0-62.08 0l-36.266667 36.266667a85.333333 85.333333 0 0 1-109.866667 9.813333 42.666667 42.666667 0 0 1 47.786667-70.4h1.28l36.48-36.266666a129.493333 129.493333 0 0 1 182.613333 0l61.44 61.653333a128 128 0 0 1 18.133334 160 113.28 113.28 0 0 1-18.986667 23.466667l-91.306667 91.093333A171.733333 171.733333 0 0 1 644.266667 981.333333z" ></path></symbol><symbol id="iconicon_Folder" viewBox="0 0 1024 1024"><path d="M746.666667 874.666667H192a128 128 0 0 1-128-128V234.666667a128 128 0 0 1 128-128h211.626667a128 128 0 0 1 104.746666 54.826666l4.48 6.4 94.08 133.76a42.666667 42.666667 0 0 0 34.773334 18.346667h158.293333a42.666667 42.666667 0 0 1 0 85.333333h-158.293333a128 128 0 0 1-104.746667-54.826666l-98.773333-140.586667A42.666667 42.666667 0 0 0 403.626667 192H192a42.666667 42.666667 0 0 0-42.666667 42.666667v512a42.666667 42.666667 0 0 0 42.666667 42.666666h554.666667a128 128 0 0 0 128-128V320a42.666667 42.666667 0 0 0-42.666667-42.666667h-183.68a42.666667 42.666667 0 0 1 0-85.333333H832a128 128 0 0 1 128 128v341.333333a213.333333 213.333333 0 0 1-213.333333 213.333334z" ></path></symbol><symbol id="iconicon_Favorite" viewBox="0 0 1024 1024"><path d="M512 682.666667a170.666667 170.666667 0 1 1 170.666667-170.666667 170.666667 170.666667 0 0 1-170.666667 170.666667z m0-256a85.333333 85.333333 0 1 0 85.333333 85.333333 85.333333 85.333333 0 0 0-85.333333-85.333333z" ></path><path d="M746.666667 941.44a91.093333 91.093333 0 0 1-42.666667-10.453333L514.56 832a5.333333 5.333333 0 0 0-5.12 0L320 930.986667a90.88 90.88 0 0 1-131.84-95.786667l36.053333-210.56a5.333333 5.333333 0 0 0-1.493333-4.906667l-152.96-149.333333a90.88 90.88 0 0 1 50.773333-154.666667l211.413334-30.72a5.76 5.76 0 0 0 4.053333-2.986666l94.506667-192a90.88 90.88 0 0 1 162.986666 0l94.506667 192a5.76 5.76 0 0 0 4.266667 2.986666l211.2 30.72a90.88 90.88 0 0 1 50.346666 155.093334l-152.96 149.333333a5.333333 5.333333 0 0 0-1.493333 4.906667 42.666667 42.666667 0 0 1-84.053333 14.506666 90.666667 90.666667 0 0 1 26.026666-80.426666l152.96-149.333334a5.333333 5.333333 0 0 0 1.493334-5.546666 5.333333 5.333333 0 0 0-4.48-3.84l-211.413334-30.72A90.666667 90.666667 0 0 1 611.413333 320l-94.506666-192a5.546667 5.546667 0 0 0-9.813334 0l-94.506666 192a90.666667 90.666667 0 0 1-68.48 49.706667l-211.413334 30.506666a5.333333 5.333333 0 0 0-4.48 3.84 5.333333 5.333333 0 0 0 1.493334 5.546667l152.96 149.333333a90.666667 90.666667 0 0 1 26.026666 81.066667l-36.053333 210.346667a5.12 5.12 0 0 0 2.133333 5.546666 5.546667 5.546667 0 0 0 5.973334 0L469.333333 756.053333a90.88 90.88 0 0 1 85.333334 0l189.013333 99.413334a5.546667 5.546667 0 0 0 5.973333 0 5.12 5.12 0 0 0 2.133334-5.546667 42.666667 42.666667 0 1 1 84.053333-14.293333 90.88 90.88 0 0 1-89.386667 106.666666z" ></path></symbol><symbol id="iconicon_Email" viewBox="0 0 1024 1024"><path d="M853.333333 850.986667H170.666667a128 128 0 0 1-128-128v-426.666667a128 128 0 0 1 
128-128h682.666666a128 128 0 0 1 128 128v24.746667a42.666667 42.666667 0 0 1-21.333333 37.12l-332.8 187.52-5.12 2.986666a213.333333 213.333333 0 0 1-219.733333 0l-4.693334-2.773333-164.266666-92.586667a42.666667 42.666667 0 1 1 42.666666-74.24l165.546667 93.226667 3.84 2.346667 2.133333 1.066666a126.506667 126.506667 0 0 0 131.413334 0l2.773333-1.493333 3.2-1.92L896 296.106667a42.666667 42.666667 0 0 0-42.666667-42.666667H170.666667a42.666667 42.666667 0 0 0-42.666667 42.666667v426.666666a42.666667 42.666667 0 0 0 42.666667 42.666667h682.666666a42.666667 42.666667 0 0 0 42.666667-42.666667V512a42.666667 42.666667 0 0 1 85.333333 0v210.986667a128 128 0 0 1-128 128z" ></path></symbol><symbol id="iconicon_Friends" viewBox="0 0 1024 1024"><path d="M720 938.666667H176a123.946667 123.946667 0 0 1-99.626667-49.706667 120.32 120.32 0 0 1-19.84-106.666667 405.333333 405.333333 0 0 1 231.893334-261.546666 260.266667 260.266667 0 0 1-46.72-45.226667 265.6 265.6 0 1 1 471.68-158.933333A269.653333 269.653333 0 0 1 443.306667 573.866667 320 320 0 0 0 138.453333 805.973333a34.773333 34.773333 0 0 0 6.186667 31.573334 38.826667 38.826667 0 0 0 31.36 15.786666h544a38.826667 38.826667 0 0 0 31.36-15.786666 35.2 35.2 0 0 0 6.186667-31.786667 322.773333 322.773333 0 0 0-35.413334-79.146667 42.666667 42.666667 0 0 1-1.066666-42.666666 42.666667 42.666667 0 0 1 37.333333-21.333334h132.266667A170.666667 170.666667 0 0 0 725.333333 533.333333a42.666667 42.666667 0 0 1 0-85.333333 128 128 0 0 0 0-256 42.666667 42.666667 0 0 1 0-85.333333 213.333333 213.333333 0 0 1 133.973334 379.306666A256 256 0 0 1 981.333333 704a42.666667 42.666667 0 0 1-42.666666 42.666667h-111.36c4.48 11.52 8.533333 23.253333 12.16 35.413333a120.746667 120.746667 0 0 1-19.84 106.666667A123.946667 123.946667 0 0 1 720 938.666667zM448 128a202.88 202.88 0 0 0-25.386667 1.706667 180.48 180.48 0 0 0 11.52 358.4 16.213333 16.213333 0 0 1 4.48 0h3.84a184.533333 184.533333 0 0 0 186.24-174.506667A180.266667 180.266667 0 0 0 448 128z" ></path></symbol><symbol id="iconicon_Home" viewBox="0 0 1024 1024"><path d="M725.333333 981.333333H469.333333a85.333333 85.333333 0 0 1-85.333333-85.333333V661.333333a132.906667 132.906667 0 0 1 73.813333-116.053333A128 128 0 0 1 640 661.333333v149.333334a42.666667 42.666667 0 0 1-85.333333 0v-149.333334a42.666667 42.666667 0 0 0-12.586667-30.506666 44.373333 44.373333 0 0 0-50.133333-7.68A50.133333 50.133333 0 0 0 469.333333 661.333333v234.666667h256a42.666667 42.666667 0 0 0 42.666667-42.666667V494.293333a42.666667 42.666667 0 0 1 42.666667-42.666666h24.96L542.293333 158.08a42.666667 42.666667 0 0 0-60.586666 0L188.373333 451.626667H213.333333a42.666667 42.666667 0 0 1 42.666667 42.666666v355.626667A45.653333 45.653333 0 0 0 300.16 896a42.666667 42.666667 0 0 1-2.986667 85.333333A130.773333 130.773333 0 0 1 170.666667 849.92V536.96H85.333333a42.666667 42.666667 0 0 1-39.466666-26.24 42.666667 42.666667 0 0 1 9.386666-46.506667L421.546667 97.92a128 128 0 0 1 180.906666 0l366.293334 366.293333a42.666667 42.666667 0 0 1 9.386666 46.506667A42.666667 42.666667 0 0 1 938.666667 536.96h-85.333334V853.333333a128 128 0 0 1-128 128z" ></path></symbol><symbol id="iconicon_List" viewBox="0 0 1024 1024"><path d="M704 725.333333H362.666667a42.666667 42.666667 0 0 1 0-85.333333h341.333333a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M704 554.666667H362.666667a42.666667 42.666667 0 0 1 0-85.333334h341.333333a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M490.666667 384h-128a42.666667 42.666667 0 0 1 
0-85.333333h128a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M686.933333 405.333333a42.666667 42.666667 0 0 1-30.506666-12.8l-89.6-91.52a42.666667 42.666667 0 0 1 61.013333-59.733333l59.093333 60.373333 178.56-182.186666a42.666667 42.666667 0 0 1 61.013334 59.733333l-209.066667 213.333333a42.666667 42.666667 0 0 1-30.506667 12.8z" ></path><path d="M725.333333 917.333333H256a128 128 0 0 1-128-128V234.666667a128 128 0 0 1 128-128h277.333333a42.666667 42.666667 0 0 1 0 85.333333H256a42.666667 42.666667 0 0 0-42.666667 42.666667v554.666666a42.666667 42.666667 0 0 0 42.666667 42.666667h469.333333a128 128 0 0 0 128-128v-192a42.666667 42.666667 0 0 1 85.333334 0v192a213.333333 213.333333 0 0 1-213.333334 213.333333z" ></path></symbol><symbol id="iconicon_Location" viewBox="0 0 1024 1024"><path d="M512 597.333333a170.666667 170.666667 0 1 1 170.666667-170.666666 170.666667 170.666667 0 0 1-170.666667 170.666666z m0-256a85.333333 85.333333 0 1 0 85.333333 85.333334 85.333333 85.333333 0 0 0-85.333333-85.333334z" ></path><path d="M512 992.426667a42.666667 42.666667 0 0 1-30.08-12.586667l-256-256A405.333333 405.333333 0 0 1 512 31.573333a405.333333 405.333333 0 0 1 286.506667 691.84l-15.786667 16a42.666667 42.666667 0 0 1-60.373333-60.373333l16-15.786667A320 320 0 0 0 512 116.906667a320 320 0 0 0-226.346667 546.346666L512 889.386667l89.813333-89.813334A42.666667 42.666667 0 1 1 661.333333 859.946667l-120.106666 119.893333a42.666667 42.666667 0 0 1-29.226667 12.586667z" ></path></symbol><symbol id="iconicon_Map" viewBox="0 0 1024 1024"><path d="M288.853333 665.173333a42.666667 42.666667 0 0 1-30.08-72.746666l90.453334-90.453334a128 128 0 0 1 180.906666 0L576 547.2a42.666667 42.666667 0 0 0 60.373333 0l90.666667-90.666667a42.666667 42.666667 0 0 1 60.16 60.373334l-90.453333 90.453333a128 128 0 0 1-180.906667 0L469.333333 562.133333a42.666667 42.666667 0 0 0-60.373333 0L320 652.8a42.666667 42.666667 0 0 1-31.146667 12.373333z" ></path><path d="M597.333333 213.333333m-42.666666 0a42.666667 42.666667 0 1 0 85.333333 0 42.666667 42.666667 0 1 0-85.333333 0Z" ></path><path d="M597.333333 437.12a42.666667 42.666667 0 0 1-30.08-12.586667l-90.666666-90.453333a170.666667 170.666667 0 1 1 241.493333 0l-90.666667 90.453333a42.666667 42.666667 0 0 1-30.08 12.586667zM597.333333 128a85.333333 85.333333 0 0 0-60.373333 145.706667L597.333333 334.08l60.373334-60.373333A85.333333 85.333333 0 0 0 597.333333 128z" ></path><path d="M704 917.333333H234.666667a128 128 0 0 1-128-128V234.666667a128 128 0 0 1 128-128h106.666666a42.666667 42.666667 0 0 1 0 85.333333h-106.666666a42.666667 42.666667 0 0 0-42.666667 42.666667v554.666666a42.666667 42.666667 0 0 0 42.666667 42.666667h469.333333a128 128 0 0 0 128-128V256a42.666667 42.666667 0 0 1 85.333333 0v448a213.333333 213.333333 0 0 1-213.333333 213.333333z" ></path></symbol><symbol id="iconicon_Notepad" viewBox="0 0 1024 1024"><path d="M490.666667 618.666667h-128a42.666667 42.666667 0 0 1 0-85.333334h128a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M405.333333 469.333333h-42.666666a42.666667 42.666667 0 0 1 0-85.333333h42.666666a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M192 746.666667H64a42.666667 42.666667 0 0 1 0-85.333334h128a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M192 554.666667H64a42.666667 42.666667 0 0 1 0-85.333334h128a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M192 362.666667H64a42.666667 42.666667 0 0 1 0-85.333334h128a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M682.666667 
917.333333H213.333333a128 128 0 0 1-128-128V234.666667a128 128 0 0 1 128-128h554.666667a128 128 0 0 1 128 128v362.666666a42.666667 42.666667 0 0 1-42.666667 42.666667h-128a128 128 0 0 1 0-256h21.333334a42.666667 42.666667 0 0 1 0 85.333333h-21.333334a42.666667 42.666667 0 0 0 0 85.333334h85.333334V234.666667a42.666667 42.666667 0 0 0-42.666667-42.666667H213.333333a42.666667 42.666667 0 0 0-42.666666 42.666667v554.666666a42.666667 42.666667 0 0 0 42.666666 42.666667h469.333334a128 128 0 0 0 123.946666-96 42.666667 42.666667 0 0 1 82.773334 21.333333A213.333333 213.333333 0 0 1 682.666667 917.333333z" ></path></symbol><symbol id="iconicon_Read" viewBox="0 0 1024 1024"><path d="M768 661.333333h-85.333333a42.666667 42.666667 0 0 1 0-85.333333h85.333333a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M768 490.666667h-85.333333a42.666667 42.666667 0 0 1 0-85.333334h85.333333a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M341.333333 661.333333h-85.333333a42.666667 42.666667 0 0 1 0-85.333333h85.333333a42.666667 42.666667 0 0 1 0 85.333333z" ></path><path d="M341.333333 490.666667h-85.333333a42.666667 42.666667 0 0 1 0-85.333334h85.333333a42.666667 42.666667 0 0 1 0 85.333334z" ></path><path d="M469.333333 926.72a85.333333 85.333333 0 0 1-21.333333-2.56l-341.333333-85.333333a85.333333 85.333333 0 0 1-64-82.773334V246.613333a85.333333 85.333333 0 0 1 106.666666-82.773333l247.253334 72.533333a42.666667 42.666667 0 0 1-23.893334 81.92L126.293333 246.186667 128 756.053333l341.333333 85.333334V331.946667a85.333333 85.333333 0 0 1 64-82.773334l341.333334-85.333333a85.333333 85.333333 0 0 1 106.666666 82.773333v509.44a85.333333 85.333333 0 0 1-64 82.773334l-264.746666 76.8a42.666667 42.666667 0 0 1-23.893334-81.92l266.453334-77.226667 0.853333-509.866667-341.333333 85.333334v509.44a85.333333 85.333333 0 0 1-85.333334 85.333333z" ></path></symbol></svg>';if((e=document.getElementsByTagName("script"))[e.length-1].getAttribute("data-injectcss")&&!t.__iconfont__svg__cssinject__){t.__iconfont__svg__cssinject__=!0;try{document.write("<style>.svgfont {display: inline-block;width: 1em;height: 1em;fill: currentColor;vertical-align: -0.1em;font-size:16px;}</style>")}catch(e){console&&console.log(e)}}!function(e){if(document.addEventListener)if(~["complete","loaded","interactive"].indexOf(document.readyState))setTimeout(e,0);else{document.addEventListener("DOMContentLoaded",function t(){document.removeEventListener("DOMContentLoaded",t,!1),e()},!1)}else document.attachEvent&&(a=e,s=t.document,n=!1,(c=function(){try{s.documentElement.doScroll("left")}catch(t){return void setTimeout(c,50)}i()})(),s.onreadystatechange=function(){"complete"==s.readyState&&(s.onreadystatechange=null,i())});function i(){n||(n=!0,a())}var a,s,n,c}(function(){var t,e;(t=document.createElement("div")).innerHTML=i,i=null,(e=t.getElementsByTagName("svg")[0])&&(e.setAttribute("aria-hidden","true"),e.style.position="absolute",e.style.width=0,e.style.height=0,e.style.overflow="hidden",function(t,e){e.firstChild?function(t,e){e.parentNode.insertBefore(t,e)}(t,e.firstChild):e.appendChild(t)}(e,document.body))})}(window)},zZKr:function(t,e){}},["NHnr"]); //# sourceMappingURL=app.7678959f9b17a348c1c8.js.map
perlin_surflet.rs
use crate::{ gradient, math, noise_fns::{NoiseFn, Seedable}, permutationtable::PermutationTable, }; /// Noise function that outputs 2/3/4-dimensional Perlin noise. #[derive(Clone, Copy, Debug)] pub struct Perlin { seed: u32, perm_table: PermutationTable, } impl Perlin { pub const DEFAULT_SEED: u32 = 0; pub fn new() -> Self { Self { seed: Self::DEFAULT_SEED,
} impl Default for Perlin { fn default() -> Self { Self::new() } } impl Seedable for Perlin { /// Sets the seed value for Perlin noise fn set_seed(self, seed: u32) -> Self { // If the new seed is the same as the current seed, just return self. if self.seed == seed { return self; } // Otherwise, regenerate the permutation table based on the new seed. Self { seed, perm_table: PermutationTable::new(seed), } } fn seed(&self) -> u32 { self.seed } } /// 2-dimensional perlin noise impl NoiseFn<[f64; 2]> for Perlin { fn get(&self, point: [f64; 2]) -> f64 { const SCALE_FACTOR: f64 = 3.160_493_827_160_493_7; #[inline(always)] fn surflet(perm_table: &PermutationTable, corner: [isize; 2], distance: [f64; 2]) -> f64 { let attn = 1.0 - math::dot2(distance, distance); if attn > 0.0 { attn.powi(4) * math::dot2(distance, gradient::get2(perm_table.get2(corner))) } else { 0.0 } } let floored = math::map2(point, f64::floor); let near_corner = math::to_isize2(floored); let far_corner = math::add2(near_corner, math::one2()); let near_distance = math::sub2(point, floored); let far_distance = math::sub2(near_distance, math::one2()); let f00 = surflet( &self.perm_table, [near_corner[0], near_corner[1]], [near_distance[0], near_distance[1]], ); let f10 = surflet( &self.perm_table, [far_corner[0], near_corner[1]], [far_distance[0], near_distance[1]], ); let f01 = surflet( &self.perm_table, [near_corner[0], far_corner[1]], [near_distance[0], far_distance[1]], ); let f11 = surflet( &self.perm_table, [far_corner[0], far_corner[1]], [far_distance[0], far_distance[1]], ); // Multiply by arbitrary value to scale to -1..1 math::clamp((f00 + f10 + f01 + f11) * SCALE_FACTOR, -1.0, 1.0) } } /// 3-dimensional perlin noise impl NoiseFn<[f64; 3]> for Perlin { fn get(&self, point: [f64; 3]) -> f64 { const SCALE_FACTOR: f64 = 3.889_855_325_553_107_4; #[inline(always)] fn surflet(perm_table: &PermutationTable, corner: [isize; 3], distance: [f64; 3]) -> f64 { let attn = 1.0 - math::dot3(distance, distance); if attn > 0.0 { attn.powi(4) * math::dot3(distance, gradient::get3(perm_table.get3(corner))) } else { 0.0 } } let floored = math::map3(point, f64::floor); let near_corner = math::to_isize3(floored); let far_corner = math::add3(near_corner, math::one3()); let near_distance = math::sub3(point, floored); let far_distance = math::sub3(near_distance, math::one3()); let f000 = surflet( &self.perm_table, [near_corner[0], near_corner[1], near_corner[2]], [near_distance[0], near_distance[1], near_distance[2]], ); let f100 = surflet( &self.perm_table, [far_corner[0], near_corner[1], near_corner[2]], [far_distance[0], near_distance[1], near_distance[2]], ); let f010 = surflet( &self.perm_table, [near_corner[0], far_corner[1], near_corner[2]], [near_distance[0], far_distance[1], near_distance[2]], ); let f110 = surflet( &self.perm_table, [far_corner[0], far_corner[1], near_corner[2]], [far_distance[0], far_distance[1], near_distance[2]], ); let f001 = surflet( &self.perm_table, [near_corner[0], near_corner[1], far_corner[2]], [near_distance[0], near_distance[1], far_distance[2]], ); let f101 = surflet( &self.perm_table, [far_corner[0], near_corner[1], far_corner[2]], [far_distance[0], near_distance[1], far_distance[2]], ); let f011 = surflet( &self.perm_table, [near_corner[0], far_corner[1], far_corner[2]], [near_distance[0], far_distance[1], far_distance[2]], ); let f111 = surflet( &self.perm_table, [far_corner[0], far_corner[1], far_corner[2]], [far_distance[0], far_distance[1], far_distance[2]], ); // Multiply by arbitrary value to scale 
to -1..1 math::clamp( (f000 + f100 + f010 + f110 + f001 + f101 + f011 + f111) * SCALE_FACTOR, -1.0, 1.0, ) } } /// 4-dimensional perlin noise impl NoiseFn<[f64; 4]> for Perlin { fn get(&self, point: [f64; 4]) -> f64 { const SCALE_FACTOR: f64 = 4.424_369_240_215_691; #[inline(always)] fn surflet(perm_table: &PermutationTable, corner: [isize; 4], distance: [f64; 4]) -> f64 { let attn = 1.0 - math::dot4(distance, distance); if attn > 0.0 { attn.powi(4) * math::dot4(distance, gradient::get4(perm_table.get4(corner))) } else { 0.0 } } let floored = math::map4(point, f64::floor); let near_corner = math::to_isize4(floored); let far_corner = math::add4(near_corner, math::one4()); let near_distance = math::sub4(point, floored); let far_distance = math::sub4(near_distance, math::one4()); let f0000 = surflet( &self.perm_table, [ near_corner[0], near_corner[1], near_corner[2], near_corner[3], ], [ near_distance[0], near_distance[1], near_distance[2], near_distance[3], ], ); let f1000 = surflet( &self.perm_table, [ far_corner[0], near_corner[1], near_corner[2], near_corner[3], ], [ far_distance[0], near_distance[1], near_distance[2], near_distance[3], ], ); let f0100 = surflet( &self.perm_table, [ near_corner[0], far_corner[1], near_corner[2], near_corner[3], ], [ near_distance[0], far_distance[1], near_distance[2], near_distance[3], ], ); let f1100 = surflet( &self.perm_table, [far_corner[0], far_corner[1], near_corner[2], near_corner[3]], [ far_distance[0], far_distance[1], near_distance[2], near_distance[3], ], ); let f0010 = surflet( &self.perm_table, [ near_corner[0], near_corner[1], far_corner[2], near_corner[3], ], [ near_distance[0], near_distance[1], far_distance[2], near_distance[3], ], ); let f1010 = surflet( &self.perm_table, [far_corner[0], near_corner[1], far_corner[2], near_corner[3]], [ far_distance[0], near_distance[1], far_distance[2], near_distance[3], ], ); let f0110 = surflet( &self.perm_table, [near_corner[0], far_corner[1], far_corner[2], near_corner[3]], [ near_distance[0], far_distance[1], far_distance[2], near_distance[3], ], ); let f1110 = surflet( &self.perm_table, [far_corner[0], far_corner[1], far_corner[2], near_corner[3]], [ far_distance[0], far_distance[1], far_distance[2], near_distance[3], ], ); let f0001 = surflet( &self.perm_table, [ near_corner[0], near_corner[1], near_corner[2], far_corner[3], ], [ near_distance[0], near_distance[1], near_distance[2], far_distance[3], ], ); let f1001 = surflet( &self.perm_table, [far_corner[0], near_corner[1], near_corner[2], far_corner[3]], [ far_distance[0], near_distance[1], near_distance[2], far_distance[3], ], ); let f0101 = surflet( &self.perm_table, [near_corner[0], far_corner[1], near_corner[2], far_corner[3]], [ near_distance[0], far_distance[1], near_distance[2], far_distance[3], ], ); let f1101 = surflet( &self.perm_table, [far_corner[0], far_corner[1], near_corner[2], far_corner[3]], [ far_distance[0], far_distance[1], near_distance[2], far_distance[3], ], ); let f0011 = surflet( &self.perm_table, [near_corner[0], near_corner[1], far_corner[2], far_corner[3]], [ near_distance[0], near_distance[1], far_distance[2], far_distance[3], ], ); let f1011 = surflet( &self.perm_table, [far_corner[0], near_corner[1], far_corner[2], far_corner[3]], [ far_distance[0], near_distance[1], far_distance[2], far_distance[3], ], ); let f0111 = surflet( &self.perm_table, [near_corner[0], far_corner[1], far_corner[2], far_corner[3]], [ near_distance[0], far_distance[1], far_distance[2], far_distance[3], ], ); let f1111 = surflet( 
&self.perm_table, [far_corner[0], far_corner[1], far_corner[2], far_corner[3]], [ far_distance[0], far_distance[1], far_distance[2], far_distance[3], ], ); // Multiply by arbitrary value to scale to -1..1 math::clamp( (f0000 + f1000 + f0100 + f1100 + f0010 + f1010 + f0110 + f1110 + f0001 + f1001 + f0101 + f1101 + f0011 + f1011 + f0111 + f1111) * SCALE_FACTOR, -1.0, 1.0, ) } }
perm_table: PermutationTable::new(Self::DEFAULT_SEED), } }
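Read in source order (prefix, then the completion above), perlin_surflet.rs defines Perlin with NoiseFn impls for 2, 3 and 4 dimensions. A minimal usage sketch, assuming the surrounding crate re-exports these items under the noise path (the import path is a guess; everything else comes from the code above):

use noise::{NoiseFn, Perlin, Seedable}; // import path assumed

fn main() {
    // Seedable::set_seed rebuilds the permutation table for a new seed.
    let perlin = Perlin::new().set_seed(42);
    // Each NoiseFn::get clamps its surflet sum into -1.0..=1.0.
    println!("2d = {}", perlin.get([0.5, 0.5]));
    println!("3d = {}", perlin.get([0.5, 0.5, 0.5]));
    println!("4d = {}", perlin.get([0.5, 0.5, 0.5, 0.5]));
}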
api_routes.go
/** * Author: Admiral Helmut * Created: 12.06.2019 * * (C) **/ package routes import ( "github.com/kataras/iris/v12" "github.com/efi4st/efi4st/dbprovider" ) func
(ctx iris.Context) { projects := dbprovider.GetDBManager().GetProjects() ctx.ViewData("projectList", projects) ctx.View("index.html") } func Documentation(ctx iris.Context) { ctx.View("documentation.html") }
Index
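A hedged sketch of how Index and Documentation might be wired into an iris v12 application (the routes import path mirrors the dbprovider import above; the view directory and port are assumptions):

package main

import (
	"github.com/kataras/iris/v12"

	"github.com/efi4st/efi4st/routes" // import path assumed
)

func main() {
	app := iris.New()
	// Both handlers call ctx.View, so a view engine must be registered first.
	app.RegisterView(iris.HTML("./views", ".html")) // template directory assumed
	app.Get("/", routes.Index)
	app.Get("/documentation", routes.Documentation)
	app.Run(iris.Addr(":8080"))
}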
defaultGet.go
package utils import "os" func DefaultGetEnv(key, defaultVal string) string { val := os.Getenv(key)
val = defaultVal } return val }
if val == "" {
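Example call, with a hypothetical key and fallback. Note the design choice: os.Getenv cannot distinguish an unset variable from one set to the empty string, so DefaultGetEnv treats both as unset, which is usually what you want for configuration defaults.

package main

import (
	"fmt"

	"example.com/app/utils" // import path assumed
)

func main() {
	// Yields "8080" when PORT is unset or set to the empty string.
	port := utils.DefaultGetEnv("PORT", "8080")
	fmt.Println("listening on :" + port)
}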
edit-profile.page.ts
import { Component, OnDestroy, OnInit } from '@angular/core'; import { AngularFireAuth } from '@angular/fire/auth'; import { AngularFireStorage } from '@angular/fire/storage'; import { FormBuilder, FormControl, FormGroup, Validators } from '@angular/forms'; import { Router } from '@angular/router'; import { Camera, CameraOptions } from '@ionic-native/Camera/ngx'; import { File } from '@ionic-native/file/ngx'; import { ActionSheetController, LoadingController, NavController, Platform, ToastController } from '@ionic/angular'; import { auth } from 'firebase/app'; import { identity, pickBy } from 'lodash'; import { Subscription } from 'rxjs'; import { IUser } from 'src/app/interfaces/user.interface'; import { AuthService } from 'src/app/services/auth.service'; import { FirebaseService } from 'src/app/services/firebase.service'; import { HelperService } from 'src/app/services/helper.service'; @Component({ selector: 'app-edit-profile', templateUrl: './edit-profile.page.html', styleUrls: ['./edit-profile.page.scss'], }) export class EditProfilePage implements OnInit, OnDestroy { public user: IUser; private userSub$: Subscription; public form: FormGroup; constructor( public navCtrl: NavController, public loadingCtrl: LoadingController, public toastCtrl: ToastController, private authService: AuthService, public router: Router, public helperService: HelperService, public angularFireAuth: AngularFireAuth, private fb: FormBuilder, private firebaseService: FirebaseService, public storage: AngularFireStorage, private camera: Camera, public actionSheetController: ActionSheetController, private file: File, private platform: Platform, ) { this.initForm(); this.userSub$ = this.angularFireAuth.user.pipe(user => this.firebaseService.getCurrentUserFirebase(this.angularFireAuth.auth.currentUser.uid) ).subscribe((res: IUser) => { this.user = res; this.form.patchValue(this.user); if (this.user.providerId !== 'google.com') { this.form.get('currentPassword').setValidators([Validators.required]); } console.log(this.user); }); } ngOnInit() { } initForm() { this.form = this.fb.group({ displayName: ['', Validators.required], email: [{ value: '', disabled: true }, Validators.required], phoneNumber: ['', Validators.required], currentPassword: [''], changePassword: [false] }); } async sendData() { const loader = await this.loadingCtrl.create({ duration: 2000 }); loader.present(); loader.onWillDismiss().then(async l => { const toast = await this.toastCtrl.create({ showCloseButton: true, cssClass: 'bg-profile', message: 'Your Data was Edited!', duration: 2000, position: 'bottom' }); toast.present(); this.navCtrl.navigateForward('/home-results'); }); } async updateProfile() { try { this.helperService.markFormGroupTouched(this.form); console.log(this.form.value.changePassword); if (this.form.invalid) { return; } this.helperService.showLoading(); const check = this.user.providerId === 'google.com' ? 
true : await this.checkPassword(this.form.value.currentPassword); if (check) { const updateDisplayName = this.angularFireAuth.auth.currentUser.updateProfile({ displayName: this.form.value.displayName }); const user: IUser = { displayName: this.form.value.displayName, email: this.angularFireAuth.auth.currentUser.email, uid: this.angularFireAuth.auth.currentUser.uid, phoneNumber: this.form.value.phoneNumber, photoURL: this.angularFireAuth.auth.currentUser.photoURL, providerId: this.user.providerId }; const params = pickBy(user, identity); const update = this.firebaseService.updateUserInfo(params); if (this.form.value.changePassword) { const updatePassword = this.angularFireAuth.auth.currentUser.updatePassword(this.form.value.password); await Promise.all([updateDisplayName, update, updatePassword]); } else { await Promise.all([updateDisplayName, update]); } const toast = await this.toastCtrl.create({ showCloseButton: true, closeButtonText: 'Close', message: 'Update successful.', duration: 2000, position: 'bottom', color: 'success' }); toast.present(); this.resetForm('changePassword'); this.resetForm('currentPassword'); } else { throw { message: 'Incorrect password' }; } // const updatePhone = await this.angularFireAuth.auth.currentUser.updatePhoneNumber(this.form.value.phoneNumber); } catch (e) { const toast = await this.toastCtrl.create({ showCloseButton: true, closeButtonText: 'Close', message: e.message || 'Update failed, please check your information.', duration: 2000, position: 'bottom', color: 'danger' }); toast.present(); } finally { this.helperService.hideLoading(); } } async checkPassword(password) { try { const verify = await this.angularFireAuth.auth.currentUser.reauthenticateWithCredential( auth.EmailAuthProvider.credential( this.angularFireAuth.auth.currentUser.email, password ) ); console.log(verify); return true; } catch (e) { console.log(e); return false; } } onChangeCheckbox(event) { if (event) { this.form.addControl('password', new FormControl('', Validators.required)); this.form.addControl('confirmPassword', new FormControl('', [Validators.required, this.helperService.matchValues('password')])); } else { this.form.removeControl('password'); this.form.removeControl('confirmPassword'); } this.form.updateValueAndValidity(); } resetForm(name) { this.form.get(name).reset(); } async pickImage(sourceType) { try { const options: CameraOptions = { quality: 100, sourceType: sourceType, destinationType: this.camera.DestinationType.FILE_URI, encodingType: this.camera.EncodingType.JPEG, mediaType: this.camera.MediaType.PICTURE }; const imageData = await this.camera.getPicture(options); const currentName = imageData.substr(imageData.lastIndexOf('/') + 1); const correctPath = imageData.substr(0, imageData.lastIndexOf('/') + 1);
const url = await res.ref.getDownloadURL(); const authUrl = this.angularFireAuth.auth.currentUser.updateProfile({ photoURL: url }); const databaseUrl = this.firebaseService.updateUserInfo({ uid: this.angularFireAuth.auth.currentUser.uid, photoURL: url }); await Promise.all([authUrl, databaseUrl]); const toast = await this.toastCtrl.create({ showCloseButton: true, closeButtonText: 'Close', message: 'Update successful.', duration: 2000, position: 'bottom', color: 'success' }); toast.present(); this.helperService.hideLoading(); } catch (e) { this.helperService.hideLoading(); console.log(e); if (e === 'No Image Selected') { return; } const toast = await this.toastCtrl.create({ showCloseButton: true, closeButtonText: 'Close', message: 'Update failed, please check your information.', duration: 2000, position: 'bottom', color: 'danger' }); toast.present(); } } async selectImage() { const actionSheet = await this.actionSheetController.create({ header: 'Choose photo from', buttons: [{ text: 'Library', handler: () => { this.pickImage(this.camera.PictureSourceType.PHOTOLIBRARY); } }, { text: 'Camera', handler: () => { this.pickImage(this.camera.PictureSourceType.CAMERA); } }, { text: 'Cancel', role: 'cancel' } ] }); await actionSheet.present(); } ngOnDestroy(): void { if (this.userSub$) { this.userSub$.unsubscribe(); } } }
const displayImageName = Date.now().toString(); this.helperService.showLoading(); const image = await this.file.readAsDataURL(correctPath, currentName); const res = await this.storage.ref(`/avatar/${displayImageName}`).putString(image.split(',')[1], 'base64');
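onChangeCheckbox above attaches this.helperService.matchValues('password') as a validator, but HelperService is not shown in this file. A hypothetical sketch of such a cross-field validator (the name and error key are assumptions, not the actual implementation):

import { AbstractControl, ValidatorFn } from '@angular/forms';

// Hypothetical: valid (null) when this control's value equals the sibling
// control named matchTo; otherwise reports an isMatching error.
export function matchValues(matchTo: string): ValidatorFn {
  return (control: AbstractControl): { [key: string]: boolean } | null => {
    const sibling = control.parent ? control.parent.get(matchTo) : null;
    return sibling && control.value === sibling.value ? null : { isMatching: true };
  };
}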
router_test.go
/* Copyright 2020 rickycorte Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package router import ( "fmt" "io/ioutil" "net/http" "net/http/httptest" "testing" ) func RunRequest(router *Router, method, path string, status int, expected string, t *testing.T) { recorder := httptest.NewRecorder() req, _ := http.NewRequest(method, path, nil) router.ServeHTTP(recorder, req) b, _ := ioutil.ReadAll(recorder.Body) body := string(b) if recorder.Code != status { t.Errorf("Mismatch result code of %s. Expected: %v, got: %v", path, status, recorder.Code) } if body != expected { t.Errorf("Mismatch in execution of %s. Expected: %s, got: %s", path, expected, body) } } func printHello(w http.ResponseWriter, _ *http.Request, _ *ParameterList) { w.WriteHeader(200) fmt.Fprintf(w, "hello") } func printMethod(w http.ResponseWriter, r *http.Request, _ *ParameterList) { w.WriteHeader(200) fmt.Fprintf(w, r.Method) } func writeData(w http.ResponseWriter, r *http.Request, p *ParameterList) { w.WriteHeader(200) fmt.Fprintf(w, p.Get("user")+"-"+p.Get("activity")+"-"+p.Get("comment")) } func panicHandler(w http.ResponseWriter, _ *http.Request, _ *ParameterList) { panic("PANIC") } func fw(w http.ResponseWriter, r *http.Request, p *ParameterList) { w.WriteHeader(200) path := "" if p != nil { path = p.Get("*") } fmt.Fprintf(w, "Got: "+path) } func TestStaticRoutes(t *testing.T) { router := MakeRouter() router.Handle("GET", "/", printHello) router.GET("/static/path/to/hello", printHello) router.Handle("GET", "/static/path/hello.html", printHello) RunRequest(router, "GET", "/", 200, "hello", t) RunRequest(router, "GET", "/static/path/to/hello", 200, "hello", t) RunRequest(router, "GET", "/static/path/to/hello.html", 404, "Not Found", t) RunRequest(router, "GET", "/static/path/hello.html", 200, "hello", t) } func TestParametricRoutes(t *testing.T) { router := MakeRouter() router.GET("/activity/:user", writeData) router.POST("/activity/:user/:activity", writeData) router.GET("/activity/:user/:activity/comments/:comment", writeData) RunRequest(router, "GET", "/activity/raccoon", 200, "raccoon--", t) RunRequest(router, "POST", "/activity/raccoon/123", 200, "raccoon-123-", t) RunRequest(router, "GET", "/activity/raccoon/123/comments/456", 200, "raccoon-123-456", t) } func TestNotFound(t *testing.T) { router := MakeRouter() router.GET("/zello/yes", printHello) router.GET("/hello", printHello) router.GET("/activity/:user", writeData) // completely wrong path RunRequest(router, "GET", "/notFound", 404, "Not Found", t) // partial match RunRequest(router, "GET", "/zello/random", 404, "Not Found", t) RunRequest(router, "GET", "/activity", 404, "Not Found", t) RunRequest(router, "GET", "/activity/", 404, "Not Found", t) //test method with no handlers RunRequest(router, "PUT", "/activity/123", 405, "Method Not Allowed", t) RunRequest(router, "PATCH", "/activity/123", 405, "Method Not Allowed", t) RunRequest(router, "DELETE", "/activity/123", 405, "Method Not Allowed", t) } func
(t *testing.T) { router := MakeRouter() router.GET("/zello/yes", printHello) router.GET("/hello", printHello) router.GET("/activity/:user", writeData) RunRequest(router, "HEAD", "/activity/", 405, "Method Not Allowed", t) } func TestIndexMethods(t *testing.T) { router := MakeRouter() router.GET("/", printMethod) router.POST("/", printMethod) router.PUT("/", printMethod) router.PATCH("/", printMethod) router.DELETE("/", printMethod) RunRequest(router, "GET", "/", 200, "GET", t) RunRequest(router, "POST", "/", 200, "POST", t) RunRequest(router, "PUT", "/", 200, "PUT", t) RunRequest(router, "PATCH", "/", 200, "PATCH", t) RunRequest(router, "DELETE", "/", 200, "DELETE", t) } func TestPanicHandler(t *testing.T) { router := MakeRouter() router.GET("/panic", panicHandler) RunRequest(router, "GET", "/panic", 500, "Something went wrong with your request", t) } func TestAsteriskParameter(t *testing.T) { router := MakeRouter() router.GET("/:*", fw) router.GET("/hello", printHello) RunRequest(router, "GET", "/", 200, "Got: ", t) RunRequest(router, "GET", "/1234", 200, "Got: /1234", t) RunRequest(router, "GET", "/hello", 200, "hello", t) } func TestPrefix(t *testing.T) { router := MakeRouter() router.GET("/a", printHello) router.UsePrefix("/api") // this router should handle /api/a as if it were only /a because the prefix is ignored RunRequest(router, "GET", "/", 404, "Not Found", t) RunRequest(router, "GET", "/a", 200, "hello", t) RunRequest(router, "GET", "/api/a", 200, "hello", t) }
TestUnsupportedMethod
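The tests call router.ServeHTTP directly, so Router satisfies http.Handler and can be served as-is. A minimal wiring sketch in the same package (port assumed; net/http is already imported by this package):

// Hypothetical server setup showing the router used outside of tests.
func ExampleServe() {
	router := MakeRouter()
	router.GET("/hello", printHello)
	// Blocks until the server stops; error handling elided for brevity.
	_ = http.ListenAndServe(":8080", router)
}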
lis2ds12.py
#!/usr/bin/env python # Author: Jon Trulson <[email protected]> # Copyright (c) 2016-2017 Intel Corporation. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from __future__ import print_function import time, sys, signal, atexit from upm import pyupm_lis2ds12 as sensorObj def main(): # Instantiate a LIS2DS12 instance using default i2c bus and address sensor = sensorObj.LIS2DS12() # For SPI, bus 0, you would pass -1 as the address, and a valid pin for CS: # LIS2DS12(0, -1, 10); ## Exit handlers ## # This function stops python from printing a stacktrace when you # hit control-C def
(signum, frame): raise SystemExit # This function lets you run code on exit def exitHandler(): print("Exiting") sys.exit(0) # Register exit handlers atexit.register(exitHandler) signal.signal(signal.SIGINT, SIGINTHandler) # now output data every 250 milliseconds while (1): sensor.update() data = sensor.getAccelerometer() print("Accelerometer x:", data[0], end=' ') print(" y:", data[1], end=' ') print(" z:", data[2], end=' ') print(" g") # we show both C and F for temperature print("Compensation Temperature:", sensor.getTemperature(), "C /", end=' ') print(sensor.getTemperature(True), "F") print() time.sleep(.250) if __name__ == '__main__': main()
SIGINTHandler
init.go
package collect import ( "os" logging "gopkg.in/op/go-logging.v1" ) func init()
{ // initializing the logger is necessary for yq to not print log messages below the ERROR level format := logging.MustStringFormatter( `%{color}%{time:15:04:05} %{shortfunc} [%{level:.4s}]%{color:reset} %{message}`, ) backend := logging.AddModuleLevel(logging.NewBackendFormatter(logging.NewLogBackend(os.Stderr, "", 0), format)) backend.SetLevel(logging.ERROR, "") logging.SetBackend(backend) }
rsrvmem.rs
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements.  See the NOTICE file // distributed with this work for additional information // regarding copyright ownership.  The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License.  You may obtain a copy of the License at // //   http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied.  See the License for the // specific language governing permissions and limitations // under the License. pub use self::platform::*; use core::fmt; use core::ptr::NonNull; pub struct RsrvMemAlloc; #[derive(Clone, PartialEq, Eq, Debug)] pub enum ProtectAttr { Read, ReadWrite, ReadExec, ReadWriteExec, } impl RsrvMemAlloc { /// Allocate a range of EPC memory from the reserved memory area /// /// # Parameters /// /// **count** /// /// Count of pages to allocate /// /// # Return value /// /// Starting address of the newly allocated memory area on success. /// #[inline] pub unsafe fn alloc(&self, count: u32) -> Result<NonNull<u8>, RsrvMemAllocErr> { NonNull::new(platform::alloc(count)).ok_or(RsrvMemAllocErr) } /// Allocate a range of EPC memory with a fixed address from the reserved memory area /// /// # Parameters /// /// **addr** /// /// The desired starting address to allocate the reserved memory. Should be page aligned. /// /// **count** /// /// Count of pages to allocate /// /// # Return value /// /// Starting address of the newly allocated memory area on success. /// #[inline] pub unsafe fn alloc_with_addr( &self, addr: NonNull<u8>, count: u32, ) -> Result<NonNull<u8>, RsrvMemAllocErr> { NonNull::new(platform::alloc_with_addr(addr.as_ptr(), count)).ok_or(RsrvMemAllocErr) } #[inline] pub unsafe fn alloc_zeroed(&self, count: u32) -> Result<NonNull<u8>, RsrvMemAllocErr> { NonNull::new(platform::alloc_zeroed(count)).ok_or(RsrvMemAllocErr) } /// Free a range of EPC memory from the reserved memory area /// /// # Parameters /// /// **addr** /// /// Starting address of the region to be freed. Page aligned. /// /// **count** /// /// Count of pages in the region to be freed /// #[inline] pub unsafe fn dealloc(&self, addr: NonNull<u8>, count: u32) -> Result<(), RsrvMemAllocErr> { platform::dealloc(addr.as_ptr(), count) } /// Modify the access permissions of the pages in the reserved memory area. /// /// # Parameters /// /// **addr** /// /// Starting address of the region whose permissions are changed. Page aligned. /// /// **count** /// /// Count of pages in the region /// /// **prot** /// /// The target memory protection. /// #[inline] pub unsafe fn protect( &self, addr: NonNull<u8>, count: u32, prot: ProtectAttr, ) -> Result<(), RsrvMemAllocErr> { platform::protect(addr.as_ptr(), count, prot) } } #[derive(Clone, PartialEq, Eq, Debug)] pub struct RsrvMemAllocErr; impl fmt::Display for RsrvMemAllocErr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
} mod platform { use super::ProtectAttr; use super::RsrvMemAllocErr; use core::ffi::c_void; use core::ptr; const SGX_PROT_READ: u32 = 0x1; const SGX_PROT_WRITE: u32 = 0x2; const SGX_PROT_EXEC: u32 = 0x4; const SE_PAGE_SIZE: usize = 0x1000; type size_t = usize; type int32_t = i32; type sgx_status_t = u32; extern "C" { pub fn sgx_alloc_rsrv_mem(length: size_t) -> *mut c_void; pub fn sgx_alloc_rsrv_mem_ex(desired_addr: *const c_void, length: size_t) -> *mut c_void; pub fn sgx_free_rsrv_mem(addr: *const c_void, length: size_t) -> int32_t; pub fn sgx_tprotect_rsrv_mem( addr: *const c_void, length: size_t, prot: i32, ) -> sgx_status_t; } #[inline] pub unsafe fn alloc(count: u32) -> *mut u8 { sgx_alloc_rsrv_mem(count as usize * SE_PAGE_SIZE) as *mut u8 } #[inline] pub unsafe fn alloc_with_addr(addr: *mut u8, count: u32) -> *mut u8 { sgx_alloc_rsrv_mem_ex(addr as *const c_void, count as usize * SE_PAGE_SIZE) as *mut u8 } #[inline] pub unsafe fn alloc_zeroed(count: u32) -> *mut u8 { let raw = alloc(count); if !raw.is_null() { ptr::write_bytes(raw, 0, count as usize * SE_PAGE_SIZE); } raw } #[inline] pub unsafe fn dealloc(addr: *mut u8, count: u32) -> Result<(), RsrvMemAllocErr> { if sgx_free_rsrv_mem(addr as *const c_void, count as usize * SE_PAGE_SIZE) == 0 { Ok(()) } else { Err(RsrvMemAllocErr) } } #[inline] pub unsafe fn protect( addr: *mut u8, count: u32, prot: ProtectAttr, ) -> Result<(), RsrvMemAllocErr> { let attr = match prot { ProtectAttr::Read => SGX_PROT_READ, ProtectAttr::ReadWrite => SGX_PROT_READ | SGX_PROT_WRITE, ProtectAttr::ReadExec => SGX_PROT_READ | SGX_PROT_EXEC, ProtectAttr::ReadWriteExec => SGX_PROT_READ | SGX_PROT_WRITE | SGX_PROT_EXEC, }; if sgx_tprotect_rsrv_mem( addr as *const c_void, count as usize * SE_PAGE_SIZE, attr as i32, ) == 0 { Ok(()) } else { Err(RsrvMemAllocErr) } } }
{ f.write_str("reserved memory allocation failed") }
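A minimal usage sketch for RsrvMemAlloc, using only the API defined above. This is illustrative: the calls are unsafe and only meaningful inside an SGX enclave linked against the trusted runtime.

// Reserve two zeroed 4 KiB pages, drop write access, then release them.
fn demo() -> Result<(), RsrvMemAllocErr> {
    let allocator = RsrvMemAlloc;
    unsafe {
        let pages = allocator.alloc_zeroed(2)?;
        allocator.protect(pages, 2, ProtectAttr::Read)?;
        allocator.dealloc(pages, 2)?;
    }
    Ok(())
}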
generate-seeds.py
#!/usr/bin/env python3 # Copyright (c) 2014-2017 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Script to generate list of seed nodes for chainparams.cpp. This script expects two text files in the directory that is passed as an argument: nodes_main.txt nodes_test.txt These files must consist of lines in the format <ip> <ip>:<port> [<ipv6>] [<ipv6>]:<port> <onion>.onion 0xDDBBCCAA (IPv4 little-endian old pnSeeds format) The output will be two data structures with the peers in binary format: static SeedSpec6 pnSeed6_main[]={ ... } static SeedSpec6 pnSeed6_test[]={ ... } These should be pasted into `src/chainparamsseeds.h`. ''' from base64 import b32decode from binascii import a2b_hex import sys, os import re # ipv4 in ipv6 prefix pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff]) # tor-specific ipv6 prefix pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43]) def name_to_ipv6(addr): if len(addr)>6 and addr.endswith('.onion'): vchAddr = b32decode(addr[0:-6], True) if len(vchAddr) != 16-len(pchOnionCat): raise ValueError('Invalid onion %s' % addr) return pchOnionCat + vchAddr elif '.' in addr: # IPv4 return pchIPv4 + bytearray((int(x) for x in addr.split('.'))) elif ':' in addr: # IPv6 sub = [[], []] # prefix, suffix x = 0 addr = addr.split(':') for i,comp in enumerate(addr): if comp == '': if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end continue x += 1 # :: skips to suffix assert(x < 2) else: # two bytes per component val = int(comp, 16) sub[x].append(val >> 8) sub[x].append(val & 0xff) nullbytes = 16 - len(sub[0]) - len(sub[1]) assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0)) return bytearray(sub[0] + ([0] * nullbytes) + sub[1]) elif addr.startswith('0x'): # IPv4-in-little-endian return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:]))) else: raise ValueError('Could not parse address %s' % addr) def parse_spec(s, defaultport): match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s) if match: # ipv6 host = match.group(1) port = match.group(2) elif s.count(':') > 1: # ipv6, no port host = s port = '' else: (host,_,port) = s.partition(':') if not port: port = defaultport else: port = int(port) host = name_to_ipv6(host) return (host,port) def process_nodes(g, f, structname, defaultport): g.write('static SeedSpec6 %s[] = {\n' % structname) first = True for line in f: comment = line.find('#') if comment != -1: line = line[0:comment] line = line.strip() if not line: continue if not first: g.write(',\n') first = False (host,port) = parse_spec(line, defaultport) hoststr = ','.join(('0x%02x' % b) for b in host) g.write(' {{%s}, %i}' % (hoststr, port)) g.write('\n};\n') def main(): if len(sys.argv)<2: print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr) exit(1) g = sys.stdout indir = sys.argv[1]
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n') g.write(' *\n') g.write(' * Each line contains a 16-byte IPv6 address and a port.\n') g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n') g.write(' */\n') with open(os.path.join(indir,'nodes_main.txt'),'r') as f: process_nodes(g, f, 'pnSeed6_main', 46412) g.write('\n') with open(os.path.join(indir,'nodes_test.txt'),'r') as f: process_nodes(g, f, 'pnSeed6_test', 47412) g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n') if __name__ == '__main__': main()
g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n') g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n') g.write('/**\n') g.write(' * List of fixed seed nodes for the paymastercoin network\n')
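A quick sanity check of parse_spec (illustrative addresses, not part of the original script):

host, port = parse_spec('1.2.3.4', 46412)
assert port == 46412                         # default port applied when none given
assert host[-4:] == bytearray([1, 2, 3, 4])  # IPv4 mapped into the IPv6 prefix

host, port = parse_spec('[2001:db8::1]:8333', 46412)
assert port == 8333                          # explicit port parsed from [host]:port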
mitre_test.go
// Unless explicitly stated otherwise all files in this repository are licensed // under the Apache License Version 2.0. // This product includes software developed at Datadog (https://www.datadoghq.com/). // Copyright 2016-2020 Datadog, Inc. // +build functionaltests package tests import ( "bytes" "fmt" "os" "testing" "time" "github.com/DataDog/datadog-agent/pkg/security/config" "github.com/DataDog/datadog-agent/pkg/security/policy" sprobe "github.com/DataDog/datadog-agent/pkg/security/probe" ) type testCase struct { action func(t *testing.T) expectedRule string } func TestMitre(t *testing.T)
{ reader := bytes.NewBufferString(config.DefaultPolicy) policy, err := policy.LoadPolicy(reader) if err != nil { t.Fatal(err) } test, err := newTestModule(policy.Macros, policy.Rules, testOpts{ enableFilters: true, }) if err != nil { t.Fatal(err) } defer test.Close() time.Sleep(time.Second) testCases := []testCase{ { action: func(t *testing.T) { f, err := os.Open("/etc/shadow") if err != nil { t.Fatal(err) } f.Close() }, expectedRule: "credential_modified", }, { action: func(t *testing.T) { f, err := os.Open(fmt.Sprintf("/proc/%d/mem", os.Getpid())) if err != nil { t.Fatal(err) } f.Close() }, expectedRule: "memory_dump", }, { action: func(t *testing.T) { f, err := os.Create("/var/log/service.log") if err != nil { t.Fatal(err) } f.Close() if err := os.Truncate("/var/log/service.log", 0); err != nil { t.Fatal(err) } }, expectedRule: "logs_altered", }, { action: func(t *testing.T) { if err := os.Remove("/var/log/service.log"); err != nil { t.Fatal(err) } }, expectedRule: "logs_removed", }, { action: func(t *testing.T) { f, err := os.Create("/usr/local/bin/pleaseremoveme") if err != nil { t.Fatal(err) } f.Close() if err := os.Chmod("/usr/local/bin/pleaseremoveme", 0777); err != nil { t.Fatal(err) } os.Remove("/usr/local/bin/pleaseremoveme") }, expectedRule: "permissions_changed", }, { action: func(t *testing.T) { f, err := os.Create("/.removeme") if err != nil { t.Fatal(err) } f.Close() os.Remove("/.removeme") }, expectedRule: "hidden_file", }, { action: func(t *testing.T) { os.Mkdir("/lib/modules", 0660) f, err := os.Create("/lib/modules/removeme.ko") if err != nil { t.Fatal(err) } f.Close() os.Remove("/lib/modules/removeme.ko") }, expectedRule: "kernel_module", }, } for _, tc := range testCases { t.Run(fmt.Sprintf("rule %s", tc.expectedRule), func(t *testing.T) { tc.action(t) timeout := time.After(3 * time.Second) for { select { case event := <-test.events: if _, ok := event.event.(*sprobe.Event); ok { if event.rule.ID == tc.expectedRule { return } } else { t.Error("invalid event") } case <-timeout: t.Error("timeout") return } } }) } }
model_graphics_controller_list_all_of.go
/* Cisco Intersight Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. API version: 1.0.9-5517 Contact: [email protected] */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. package intersight import ( "encoding/json" ) // GraphicsControllerListAllOf struct for GraphicsControllerListAllOf type GraphicsControllerListAllOf struct { // The total number of 'graphics.Controller' resources matching the request, accross all pages. The 'Count' attribute is included when the HTTP GET request includes the '$inlinecount' parameter. Count *int32 `json:"Count,omitempty"` // The array of 'graphics.Controller' resources matching the request. Results []GraphicsController `json:"Results,omitempty"` AdditionalProperties map[string]interface{} } type _GraphicsControllerListAllOf GraphicsControllerListAllOf // NewGraphicsControllerListAllOf instantiates a new GraphicsControllerListAllOf object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewGraphicsControllerListAllOf() *GraphicsControllerListAllOf { this := GraphicsControllerListAllOf{} return &this } // NewGraphicsControllerListAllOfWithDefaults instantiates a new GraphicsControllerListAllOf object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewGraphicsControllerListAllOfWithDefaults() *GraphicsControllerListAllOf { this := GraphicsControllerListAllOf{} return &this } // GetCount returns the Count field value if set, zero value otherwise. 
func (o *GraphicsControllerListAllOf) GetCount() int32 { if o == nil || o.Count == nil { var ret int32 return ret } return *o.Count } // GetCountOk returns a tuple with the Count field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GraphicsControllerListAllOf) GetCountOk() (*int32, bool) { if o == nil || o.Count == nil { return nil, false } return o.Count, true } // HasCount returns a boolean if a field has been set. func (o *GraphicsControllerListAllOf) HasCount() bool { if o != nil && o.Count != nil { return true } return false } // SetCount gets a reference to the given int32 and assigns it to the Count field. func (o *GraphicsControllerListAllOf) SetCount(v int32) { o.Count = &v } // GetResults returns the Results field value if set, zero value otherwise (both if not set or set to explicit null). func (o *GraphicsControllerListAllOf) GetResults() []GraphicsController { if o == nil { var ret []GraphicsController return ret } return o.Results } // GetResultsOk returns a tuple with the Results field value if set, nil otherwise // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *GraphicsControllerListAllOf) GetResultsOk() (*[]GraphicsController, bool) { if o == nil || o.Results == nil { return nil, false } return &o.Results, true } // HasResults returns a boolean if a field has been set. func (o *GraphicsControllerListAllOf) HasResults() bool { if o != nil && o.Results != nil { return true } return false } // SetResults gets a reference to the given []GraphicsController and assigns it to the Results field. func (o *GraphicsControllerListAllOf) SetResults(v []GraphicsController) { o.Results = v } func (o GraphicsControllerListAllOf) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.Count != nil { toSerialize["Count"] = o.Count } if o.Results != nil { toSerialize["Results"] = o.Results } for key, value := range o.AdditionalProperties { toSerialize[key] = value } return json.Marshal(toSerialize) } func (o *GraphicsControllerListAllOf) UnmarshalJSON(bytes []byte) (err error) { varGraphicsControllerListAllOf := _GraphicsControllerListAllOf{} if err = json.Unmarshal(bytes, &varGraphicsControllerListAllOf); err == nil { *o = GraphicsControllerListAllOf(varGraphicsControllerListAllOf) } additionalProperties := make(map[string]interface{}) if err = json.Unmarshal(bytes, &additionalProperties); err == nil { delete(additionalProperties, "Count") delete(additionalProperties, "Results") o.AdditionalProperties = additionalProperties } return err } type NullableGraphicsControllerListAllOf struct { value *GraphicsControllerListAllOf isSet bool } func (v NullableGraphicsControllerListAllOf) Get() *GraphicsControllerListAllOf { return v.value } func (v *NullableGraphicsControllerListAllOf) Set(val *GraphicsControllerListAllOf) { v.value = val v.isSet = true } func (v NullableGraphicsControllerListAllOf) IsSet() bool { return v.isSet } func (v *NullableGraphicsControllerListAllOf) Unset() { v.value = nil v.isSet = false } func
(val *GraphicsControllerListAllOf) *NullableGraphicsControllerListAllOf { return &NullableGraphicsControllerListAllOf{value: val, isSet: true} } func (v NullableGraphicsControllerListAllOf) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableGraphicsControllerListAllOf) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
NewNullableGraphicsControllerListAllOf
substitution_matrix.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Dec 19 10:16:35 2019 @author: naiara """ # SCORE MATRIX OR SUBSTITUTION MATRIX """ 1 = "ACAGGTGGACCT" 2 = "ACTGGTCGACTC" P(A) = 5/24 P(A, A) = 2/12 P(C, C) = 2/12 P(G, T) = 1 P(C) = 6/24 P(A, C) = 1 P(C, G) = 1/12 P(T, T) =1/12 P(G) = 7/24 P(A, G) = 1 P(C, T) = 1/12 P(T) = 6/24 P(A, T) = 1/12 P(G, G) = 3/12 We assume that when the pairs do not exist the probability will be 1 s(a,b) = int[k * log(P(ab) / (Qa * Qb))] -> k = 1 A C T G A 1.4294292643817876 1.2833012287035497 1.2163544390729364 1.3180633349627615 C 1.2833012287035497 1.271066772286538 1.1719352992845236 1.2388820889151366 T 1.2163544390729364 1.1719352992845236 1.1671364164027547 1.1371731930253115 G 1.3180633349627615 1.2388820889151366 1.1371731930253115 1.271066772286538 """ import math seq1 = "ACAGGTGGACCT" seq2 = "ACTGGTCGACTT" seq = "CTATATGG" seq = "CCGGATCG" def print_matrix(row): var = ""; list_bases = ["A", "C", "G", "T"] for i in range(len(row)): var += list_bases[i] + "\t" for j in row[i]: var += str(j) + "\t" var += "\n" print(var) def substitution_matrix(seq1, seq2): if len(seq1) == len(seq2): prob_res = {}; list_pairs = []; dic_pairs = {}; list_bases = ["A", "C", "G", "T"]; k = 1; result_list = []; bases = ""; total_pairs = 0 total_residuos = len(seq1) + len(seq2) prob_res["A"] = (seq1.count("A") + seq2.count("A")) / total_residuos prob_res["C"] = (seq1.count("C") + seq2.count("C")) / total_residuos prob_res["G"] = (seq1.count("G") + seq2.count("G")) / total_residuos prob_res["T"] = (seq1.count("T") + seq2.count("T")) / total_residuos print(prob_res, "\n") for i in range(len(seq1)): list_pairs.append(seq1[i] + seq2[i]) for j in list_pairs: if not j in dic_pairs: bases = j if not bases[::-1] in dic_pairs: if j == bases[::-1]: total_pairs = list_pairs.count(j) else: total_pairs = list_pairs.count(j) + list_pairs.count(bases[::-1]) dic_pairs[j] = total_pairs / len(seq1) dic_pairs[bases[::-1]] = dic_pairs[j] print(dic_pairs, "\n") for i in range(len(list_bases)): list_prob = [] for j in range(len(list_bases)): pro_1 = prob_res[list_bases[i]] pro_2 = prob_res[list_bases[j]] if (list_bases[i]+list_bases[j] in dic_pairs): pro_both = dic_pairs[list_bases[i]+list_bases[j]] + 1 else: pro_both = 1 if pro_1 == 0 or pro_2 == 0: list_prob.append(0) else: list_prob.append( k * math.log10(pro_both / (pro_1 * pro_2))) result_list.append(list_prob)
else: print("Length of the sequences are different") return result_list print_matrix(sustitution_matrix(seq1, seq2))
ffmpeg_test.go
// ffmpeg package contains helpers to unescape and escape FFmpeg strings package ffmpeg import ( "testing" ) func TestUnescape(t *testing.T) { tests := []struct { name string in string want string }{ {"string that needs no escaping", "lorem ipsum", "lorem ipsum"}, {"escaped with backslash", "lorem\\ ipsum", "lorem ipsum"}, {"escaped with quotes", "'lorem ipsum'", "lorem ipsum"}, {"backslash within quotes", "'lorem\\ ipsum'", "lorem\\ ipsum"}, {"different escapes in same", "lorem\\ ipsum 'lorem ipsum'", "lorem ipsum lorem ipsum"}, {"escaped quotes", "lorem\\' ipsum\\'", "lorem' ipsum'"}, {"mixed escapes", "lorem\\' ip'su\\'m", "lorem' ipsu\\m"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := Unescape(tt.in); got != tt.want
}) } } func TestEscape(t *testing.T) { tests := []struct { name string in string want string }{ {"escapes backslash", "lorem\\ipsum", "lorem\\\\ipsum"}, {"escapes quotation marks", "lorem'ipsum", "lorem\\'ipsum"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := Escape(tt.in); got != tt.want { t.Errorf("Escape() = %v, want %v", got, tt.want) } }) } }
{ t.Errorf("Unescape() = %v, want %v", got, tt.want) }
utils.py
import sys import traceback import logging logger = logging.getLogger(__name__) def logging_traceback():
exc_type, exc_value, exc_traceback = sys.exc_info() lines = traceback.format_exception(exc_type, exc_value, exc_traceback) error_message = ''.join(lines) logger.error(error_message)
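Typical use (hypothetical failure shown): call logging_traceback from an except block to record the full traceback without re-raising.

try:
    1 / 0
except ZeroDivisionError:
    logging_traceback()  # emits the formatted traceback via logger.error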
CinemaRenderTest.py
import pytest import pytest_xvfb import os import cinemasci @pytest.fixture(autouse=True, scope='session') def ensure_xvfb(): if not pytest_xvfb.xvfb_available(): raise Exception("Tests need Xvfb to run.") def test_render(): # create a test database
os.system("./bin/create-database --database scratch/cinema.cdb") # open a cinema database cdb = cinemasci.DatabaseReader(); cdb.inputs["Path"].setValue( 'scratch/cinema.cdb' ); # Select Some Data Products\n", query = cinemasci.DatabaseQuery(); query.inputs["Table"].setValue(cdb.outputs['Table']); query.inputs["Query"].setValue('SELECT * FROM input LIMIT 5 OFFSET 0'); # Read Data Products imageReader = cinemasci.ImageReader(); imageReader.inputs["Table"].setValue(query.outputs['Table']) # Read Data Products imageRenderer = cinemasci.ImageRenderer(); imageRenderer.inputs["Image"].setValue( imageReader.outputs["Images"] ); # print images images = imageRenderer.outputs["Image"].getValue(); print(images)
transpositionFileCipher.py
import os, sys, time, Transposition.transpositionEncrypt as ENC, \
    Transposition.transpositionDecrypt as DEC


def main():
    f_key = 10
    # f_mode = 'encrypt'
    f_mode = 'decrypt'
    if f_mode == 'decrypt':
        input_filename = 'frankenstein.encrypt.txt'
    else:
        input_filename = 'frankenstein.txt'
    output_filename = f'frankenstein.{f_mode}.txt'

    if not os.path.exists(input_filename):
        print(f'File {input_filename} does not exist, quitting...')
        sys.exit()

    if os.path.exists(output_filename):
    # read file
    file_obj = open(input_filename)
    content = file_obj.read()
    file_obj.close()

    print(f'{f_mode.title()}ing...')
    start_time = time.time()
    if f_mode == 'encrypt':
        transformed = ENC.encrypt_msg(f_key, content)
    else:
        transformed = DEC.decrypt_msg(f_key, content)
    total_time = round(time.time() - start_time, 2)
    print(f'{f_mode.title()}ion took {total_time} seconds.')

    # write to file
    output_file_obj = open(output_filename, 'w')
    output_file_obj.write(transformed)
    output_file_obj.close()
    print(f'{f_mode.title()}ed {len(content)} characters to {output_filename}.')


if __name__ == '__main__':
    main()
        print(
            f'File {output_filename} already exists and will be overwritten. (C)ontinue or (Q)uit?')
        response = input('> ')
        if not response.lower().startswith('c'):
            sys.exit()
resource_aws_api_gateway_stage.go
package aws import ( "fmt" "log" "strings" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/arn" "github.com/aws/aws-sdk-go/service/apigateway" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-aws/aws/internal/keyvaluetags" ) func resourceAwsApiGatewayStage() *schema.Resource { return &schema.Resource{ Create: resourceAwsApiGatewayStageCreate, Read: resourceAwsApiGatewayStageRead, Update: resourceAwsApiGatewayStageUpdate, Delete: resourceAwsApiGatewayStageDelete, Importer: &schema.ResourceImporter{ State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { idParts := strings.Split(d.Id(), "/") if len(idParts) != 2 || idParts[0] == "" || idParts[1] == "" { return nil, fmt.Errorf("Unexpected format of ID (%q), expected REST-API-ID/STAGE-NAME", d.Id()) } restApiID := idParts[0] stageName := idParts[1] d.Set("stage_name", stageName) d.Set("rest_api_id", restApiID) d.SetId(fmt.Sprintf("ags-%s-%s", restApiID, stageName)) return []*schema.ResourceData{d}, nil }, }, Schema: map[string]*schema.Schema{ "access_log_settings": { Type: schema.TypeList, MaxItems: 1, Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "destination_arn": { Type: schema.TypeString, Required: true, }, "format": { Type: schema.TypeString, Required: true, }, }, }, }, "cache_cluster_enabled": { Type: schema.TypeBool, Optional: true, }, "cache_cluster_size": { Type: schema.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ apigateway.CacheClusterSize05, apigateway.CacheClusterSize16, apigateway.CacheClusterSize61, apigateway.CacheClusterSize118, apigateway.CacheClusterSize135, apigateway.CacheClusterSize237, apigateway.CacheClusterSize284, apigateway.CacheClusterSize582, }, true), }, "client_certificate_id": { Type: schema.TypeString, Optional: true, }, "deployment_id": { Type: schema.TypeString, Required: true, }, "description": { Type: schema.TypeString, Optional: true, }, "documentation_version": { Type: schema.TypeString, Optional: true, }, "execution_arn": { Type: schema.TypeString, Computed: true, }, "invoke_url": { Type: schema.TypeString, Computed: true, }, "rest_api_id": { Type: schema.TypeString, Required: true, ForceNew: true, }, "stage_name": { Type: schema.TypeString, Required: true, ForceNew: true, }, "variables": { Type: schema.TypeMap, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, }, "tags": tagsSchema(), "xray_tracing_enabled": { Type: schema.TypeBool, Optional: true, }, "arn": { Type: schema.TypeString, Computed: true, }, }, } } func resourceAwsApiGatewayStageCreate(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).apigatewayconn input := apigateway.CreateStageInput{ RestApiId: aws.String(d.Get("rest_api_id").(string)), StageName: aws.String(d.Get("stage_name").(string)), DeploymentId: aws.String(d.Get("deployment_id").(string)), } waitForCache := false if v, ok := d.GetOk("cache_cluster_enabled"); ok { input.CacheClusterEnabled = aws.Bool(v.(bool)) waitForCache = true } if v, ok := d.GetOk("cache_cluster_size"); ok { input.CacheClusterSize = aws.String(v.(string)) waitForCache = true } if v, ok := d.GetOk("description"); ok { input.Description = aws.String(v.(string)) } if v, ok := d.GetOk("xray_tracing_enabled"); ok { input.TracingEnabled = aws.Bool(v.(bool)) } if v, ok := 
d.GetOk("documentation_version"); ok { input.DocumentationVersion = aws.String(v.(string)) } if vars, ok := d.GetOk("variables"); ok { variables := make(map[string]string) for k, v := range vars.(map[string]interface{}) { variables[k] = v.(string) } input.Variables = aws.StringMap(variables) } if v, ok := d.GetOk("tags"); ok { input.Tags = keyvaluetags.New(v.(map[string]interface{})).IgnoreAws().ApigatewayTags() } out, err := conn.CreateStage(&input) if err != nil { return fmt.Errorf("Error creating API Gateway Stage: %s", err) } d.SetId(fmt.Sprintf("ags-%s-%s", d.Get("rest_api_id").(string), d.Get("stage_name").(string))) if waitForCache && *out.CacheClusterStatus != apigateway.CacheClusterStatusNotAvailable { stateConf := &resource.StateChangeConf{ Pending: []string{ apigateway.CacheClusterStatusCreateInProgress, apigateway.CacheClusterStatusDeleteInProgress, apigateway.CacheClusterStatusFlushInProgress, }, Target: []string{apigateway.CacheClusterStatusAvailable}, Refresh: apiGatewayStageCacheRefreshFunc(conn, d.Get("rest_api_id").(string), d.Get("stage_name").(string)), Timeout: 90 * time.Minute, } _, err := stateConf.WaitForState() if err != nil { return err } } if _, ok := d.GetOk("client_certificate_id"); ok { return resourceAwsApiGatewayStageUpdate(d, meta) } if _, ok := d.GetOk("access_log_settings"); ok { return resourceAwsApiGatewayStageUpdate(d, meta) } return resourceAwsApiGatewayStageRead(d, meta) } func resourceAwsApiGatewayStageRead(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).apigatewayconn ignoreTagsConfig := meta.(*AWSClient).IgnoreTagsConfig log.Printf("[DEBUG] Reading API Gateway Stage %s", d.Id()) restApiId := d.Get("rest_api_id").(string) stageName := d.Get("stage_name").(string) input := apigateway.GetStageInput{ RestApiId: aws.String(restApiId), StageName: aws.String(stageName), } stage, err := conn.GetStage(&input) if isAWSErr(err, apigateway.ErrCodeNotFoundException, "") { log.Printf("[WARN] API Gateway Stage (%s) not found, removing from state", d.Id()) d.SetId("") return nil } if err != nil
log.Printf("[DEBUG] Received API Gateway Stage: %s", stage) if err := d.Set("access_log_settings", flattenApiGatewayStageAccessLogSettings(stage.AccessLogSettings)); err != nil { return fmt.Errorf("error setting access_log_settings: %s", err) } d.Set("client_certificate_id", stage.ClientCertificateId) if stage.CacheClusterStatus != nil && *stage.CacheClusterStatus == apigateway.CacheClusterStatusDeleteInProgress { d.Set("cache_cluster_enabled", false) d.Set("cache_cluster_size", nil) } else { d.Set("cache_cluster_enabled", stage.CacheClusterEnabled) d.Set("cache_cluster_size", stage.CacheClusterSize) } d.Set("deployment_id", stage.DeploymentId) d.Set("description", stage.Description) d.Set("documentation_version", stage.DocumentationVersion) d.Set("xray_tracing_enabled", stage.TracingEnabled) if err := d.Set("tags", keyvaluetags.ApigatewayKeyValueTags(stage.Tags).IgnoreAws().IgnoreConfig(ignoreTagsConfig).Map()); err != nil { return fmt.Errorf("error setting tags: %s", err) } stageArn := arn.ARN{ Partition: meta.(*AWSClient).partition, Region: meta.(*AWSClient).region, Service: "apigateway", Resource: fmt.Sprintf("/restapis/%s/stages/%s", d.Get("rest_api_id").(string), d.Get("stage_name").(string)), }.String() d.Set("arn", stageArn) if err := d.Set("variables", aws.StringValueMap(stage.Variables)); err != nil { return fmt.Errorf("error setting variables: %s", err) } d.Set("invoke_url", buildApiGatewayInvokeURL(meta.(*AWSClient), restApiId, stageName)) executionArn := arn.ARN{ Partition: meta.(*AWSClient).partition, Service: "execute-api", Region: meta.(*AWSClient).region, AccountID: meta.(*AWSClient).accountid, Resource: fmt.Sprintf("%s/%s", restApiId, stageName), }.String() d.Set("execution_arn", executionArn) return nil } func resourceAwsApiGatewayStageUpdate(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).apigatewayconn stageArn := arn.ARN{ Partition: meta.(*AWSClient).partition, Region: meta.(*AWSClient).region, Service: "apigateway", Resource: fmt.Sprintf("/restapis/%s/stages/%s", d.Get("rest_api_id").(string), d.Get("stage_name").(string)), }.String() if d.HasChange("tags") { o, n := d.GetChange("tags") if err := keyvaluetags.ApigatewayUpdateTags(conn, stageArn, o, n); err != nil { return fmt.Errorf("error updating tags: %s", err) } } operations := make([]*apigateway.PatchOperation, 0) waitForCache := false if d.HasChange("cache_cluster_enabled") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/cacheClusterEnabled"), Value: aws.String(fmt.Sprintf("%t", d.Get("cache_cluster_enabled").(bool))), }) waitForCache = true } if d.HasChange("cache_cluster_size") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/cacheClusterSize"), Value: aws.String(d.Get("cache_cluster_size").(string)), }) waitForCache = true } if d.HasChange("client_certificate_id") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/clientCertificateId"), Value: aws.String(d.Get("client_certificate_id").(string)), }) } if d.HasChange("deployment_id") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/deploymentId"), Value: aws.String(d.Get("deployment_id").(string)), }) } if d.HasChange("description") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: 
aws.String("/description"), Value: aws.String(d.Get("description").(string)), }) } if d.HasChange("xray_tracing_enabled") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/tracingEnabled"), Value: aws.String(fmt.Sprintf("%t", d.Get("xray_tracing_enabled").(bool))), }) } if d.HasChange("documentation_version") { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/documentationVersion"), Value: aws.String(d.Get("documentation_version").(string)), }) } if d.HasChange("variables") { o, n := d.GetChange("variables") oldV := o.(map[string]interface{}) newV := n.(map[string]interface{}) operations = append(operations, diffVariablesOps(oldV, newV)...) } if d.HasChange("access_log_settings") { accessLogSettings := d.Get("access_log_settings").([]interface{}) if len(accessLogSettings) == 1 { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/accessLogSettings/destinationArn"), Value: aws.String(d.Get("access_log_settings.0.destination_arn").(string)), }, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String("/accessLogSettings/format"), Value: aws.String(d.Get("access_log_settings.0.format").(string)), }) } else if len(accessLogSettings) == 0 { operations = append(operations, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpRemove), Path: aws.String("/accessLogSettings"), }) } } input := apigateway.UpdateStageInput{ RestApiId: aws.String(d.Get("rest_api_id").(string)), StageName: aws.String(d.Get("stage_name").(string)), PatchOperations: operations, } log.Printf("[DEBUG] Updating API Gateway Stage: %s", input) out, err := conn.UpdateStage(&input) if err != nil { return fmt.Errorf("Updating API Gateway Stage failed: %s", err) } if waitForCache && *out.CacheClusterStatus != apigateway.CacheClusterStatusNotAvailable { stateConf := &resource.StateChangeConf{ Pending: []string{ apigateway.CacheClusterStatusCreateInProgress, apigateway.CacheClusterStatusFlushInProgress, }, Target: []string{ apigateway.CacheClusterStatusAvailable, // There's an AWS API bug (raised & confirmed in Sep 2016 by support) // which causes the stage to remain in deletion state forever apigateway.CacheClusterStatusDeleteInProgress, }, Refresh: apiGatewayStageCacheRefreshFunc(conn, d.Get("rest_api_id").(string), d.Get("stage_name").(string)), Timeout: 30 * time.Minute, } _, err := stateConf.WaitForState() if err != nil { return err } } return resourceAwsApiGatewayStageRead(d, meta) } func diffVariablesOps(oldVars, newVars map[string]interface{}) []*apigateway.PatchOperation { ops := make([]*apigateway.PatchOperation, 0) prefix := "/variables/" for k := range oldVars { if _, ok := newVars[k]; !ok { ops = append(ops, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpRemove), Path: aws.String(prefix + k), }) } } for k, v := range newVars { newValue := v.(string) if oldV, ok := oldVars[k]; ok { oldValue := oldV.(string) if oldValue == newValue { continue } } ops = append(ops, &apigateway.PatchOperation{ Op: aws.String(apigateway.OpReplace), Path: aws.String(prefix + k), Value: aws.String(newValue), }) } return ops } func apiGatewayStageCacheRefreshFunc(conn *apigateway.APIGateway, apiId, stageName string) func() (interface{}, string, error) { return func() (interface{}, string, error) { input := apigateway.GetStageInput{ RestApiId: aws.String(apiId), StageName: aws.String(stageName), } out, err := 
conn.GetStage(&input) if err != nil { return 42, "", err } return out, *out.CacheClusterStatus, nil } } func resourceAwsApiGatewayStageDelete(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).apigatewayconn log.Printf("[DEBUG] Deleting API Gateway Stage: %s", d.Id()) input := apigateway.DeleteStageInput{ RestApiId: aws.String(d.Get("rest_api_id").(string)), StageName: aws.String(d.Get("stage_name").(string)), } _, err := conn.DeleteStage(&input) if isAWSErr(err, apigateway.ErrCodeNotFoundException, "") { return nil } if err != nil { return fmt.Errorf("error deleting API Gateway REST API (%s) Stage (%s): %w", d.Get("rest_api_id").(string), d.Get("stage_name").(string), err) } return nil } func flattenApiGatewayStageAccessLogSettings(accessLogSettings *apigateway.AccessLogSettings) []map[string]interface{} { result := make([]map[string]interface{}, 0, 1) if accessLogSettings != nil { result = append(result, map[string]interface{}{ "destination_arn": aws.StringValue(accessLogSettings.DestinationArn), "format": aws.StringValue(accessLogSettings.Format), }) } return result }
{ return fmt.Errorf("error getting API Gateway REST API (%s) Stage (%s): %w", restApiId, stageName, err) }
private_dns_arecord_types.go
/* Copyright The Kubeform Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
// Code generated by Kubeform. DO NOT EDIT. package v1alpha1 import ( base "kubeform.dev/kubeform/apis/base/v1alpha1" core "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // +genclient // +k8s:openapi-gen=true // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // +kubebuilder:object:root=true // +kubebuilder:subresource:status // +kubebuilder:printcolumn:name="Phase",type=string,JSONPath=`.status.phase` type PrivateDNSARecord struct { metav1.TypeMeta `json:",inline,omitempty"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec PrivateDNSARecordSpec `json:"spec,omitempty"` Status PrivateDNSARecordStatus `json:"status,omitempty"` } type PrivateDNSARecordSpec struct { ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"` ID string `json:"id,omitempty" tf:"id,omitempty"` Name string `json:"name" tf:"name"` Records []string `json:"records" tf:"records"` ResourceGroupName string `json:"resourceGroupName" tf:"resource_group_name"` // +optional Tags map[string]string `json:"tags,omitempty" tf:"tags,omitempty"` Ttl int64 `json:"ttl" tf:"ttl"` ZoneName string `json:"zoneName" tf:"zone_name"` } type PrivateDNSARecordStatus struct { // Resource generation, which is updated on mutation by the API Server. // +optional ObservedGeneration int64 `json:"observedGeneration,omitempty"` // +optional Output *PrivateDNSARecordSpec `json:"output,omitempty"` // +optional State *base.State `json:"state,omitempty"` // +optional Phase base.Phase `json:"phase,omitempty"` // +optional TerraformErrors []string `json:"terraformErrors,omitempty"` } // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // +kubebuilder:object:root=true // PrivateDNSARecordList is a list of PrivateDNSARecords type PrivateDNSARecordList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` // Items is a list of PrivateDNSARecord CRD objects Items []PrivateDNSARecord `json:"items,omitempty"` }
debug.go
// Code generated by go-bindata. // sources: // in/a/test.asset // in/b/test.asset // in/c/test.asset // in/test.asset // DO NOT EDIT! package main import ( "fmt" "io/ioutil" "strings" "os" "path/filepath" ) // bindataRead reads the given file from disk. It returns an error on failure. func bindataRead(path, name string) ([]byte, error) { buf, err := ioutil.ReadFile(path) if err != nil { err = fmt.Errorf("Error reading asset %s at %s: %v", name, path, err) } return buf, err } type asset struct { bytes []byte info os.FileInfo } // inATestAsset reads file data from disk. It returns an error on failure. func inATestAsset() (*asset, error) { path := "/Users/tamird/src/go/src/github.com/jteeuwen/go-bindata/testdata/in/a/test.asset" name := "in/a/test.asset" bytes, err := bindataRead(path, name) if err != nil
fi, err := os.Stat(path) if err != nil { err = fmt.Errorf("Error reading asset info %s at %s: %v", name, path, err) } a := &asset{bytes: bytes, info: fi} return a, err } // inBTestAsset reads file data from disk. It returns an error on failure. func inBTestAsset() (*asset, error) { path := "/Users/tamird/src/go/src/github.com/jteeuwen/go-bindata/testdata/in/b/test.asset" name := "in/b/test.asset" bytes, err := bindataRead(path, name) if err != nil { return nil, err } fi, err := os.Stat(path) if err != nil { err = fmt.Errorf("Error reading asset info %s at %s: %v", name, path, err) } a := &asset{bytes: bytes, info: fi} return a, err } // inCTestAsset reads file data from disk. It returns an error on failure. func inCTestAsset() (*asset, error) { path := "/Users/tamird/src/go/src/github.com/jteeuwen/go-bindata/testdata/in/c/test.asset" name := "in/c/test.asset" bytes, err := bindataRead(path, name) if err != nil { return nil, err } fi, err := os.Stat(path) if err != nil { err = fmt.Errorf("Error reading asset info %s at %s: %v", name, path, err) } a := &asset{bytes: bytes, info: fi} return a, err } // inTestAsset reads file data from disk. It returns an error on failure. func inTestAsset() (*asset, error) { path := "/Users/tamird/src/go/src/github.com/jteeuwen/go-bindata/testdata/in/test.asset" name := "in/test.asset" bytes, err := bindataRead(path, name) if err != nil { return nil, err } fi, err := os.Stat(path) if err != nil { err = fmt.Errorf("Error reading asset info %s at %s: %v", name, path, err) } a := &asset{bytes: bytes, info: fi} return a, err } // Asset loads and returns the asset for the given name. // It returns an error if the asset could not be found or // could not be loaded. func Asset(name string) ([]byte, error) { cannonicalName := strings.Replace(name, "\\", "/", -1) if f, ok := _bindata[cannonicalName]; ok { a, err := f() if err != nil { return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) } return a.bytes, nil } return nil, fmt.Errorf("Asset %s not found", name) } // MustAsset is like Asset but panics when Asset would return an error. // It simplifies safe initialization of global variables. func MustAsset(name string) []byte { a, err := Asset(name) if (err != nil) { panic("asset: Asset(" + name + "): " + err.Error()) } return a } // AssetInfo loads and returns the asset info for the given name. // It returns an error if the asset could not be found or // could not be loaded. func AssetInfo(name string) (os.FileInfo, error) { cannonicalName := strings.Replace(name, "\\", "/", -1) if f, ok := _bindata[cannonicalName]; ok { a, err := f() if err != nil { return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) } return a.info, nil } return nil, fmt.Errorf("AssetInfo %s not found", name) } // AssetNames returns the names of the assets. func AssetNames() []string { names := make([]string, 0, len(_bindata)) for name := range _bindata { names = append(names, name) } return names } // _bindata is a table, holding each asset generator, mapped to its name. var _bindata = map[string]func() (*asset, error){ "in/a/test.asset": inATestAsset, "in/b/test.asset": inBTestAsset, "in/c/test.asset": inCTestAsset, "in/test.asset": inTestAsset, } // AssetDir returns the file names below a certain // directory embedded in the file by go-bindata. // For example if you run go-bindata on data/... 
and data contains the // following hierarchy: // data/ // foo.txt // img/ // a.png // b.png // then AssetDir("data") would return []string{"foo.txt", "img"} // AssetDir("data/img") would return []string{"a.png", "b.png"} // AssetDir("foo.txt") and AssetDir("notexist") would return an error // AssetDir("") will return []string{"data"}. func AssetDir(name string) ([]string, error) { node := _bintree if len(name) != 0 { cannonicalName := strings.Replace(name, "\\", "/", -1) pathList := strings.Split(cannonicalName, "/") for _, p := range pathList { node = node.Children[p] if node == nil { return nil, fmt.Errorf("Asset %s not found", name) } } } if node.Func != nil { return nil, fmt.Errorf("Asset %s not found", name) } rv := make([]string, 0, len(node.Children)) for childName := range node.Children { rv = append(rv, childName) } return rv, nil } type bintree struct { Func func() (*asset, error) Children map[string]*bintree } var _bintree = &bintree{nil, map[string]*bintree{ "in": &bintree{nil, map[string]*bintree{ "a": &bintree{nil, map[string]*bintree{ "test.asset": &bintree{inATestAsset, map[string]*bintree{ }}, }}, "b": &bintree{nil, map[string]*bintree{ "test.asset": &bintree{inBTestAsset, map[string]*bintree{ }}, }}, "c": &bintree{nil, map[string]*bintree{ "test.asset": &bintree{inCTestAsset, map[string]*bintree{ }}, }}, "test.asset": &bintree{inTestAsset, map[string]*bintree{ }}, }}, }} // RestoreAsset restores an asset under the given directory func RestoreAsset(dir, name string) error { data, err := Asset(name) if err != nil { return err } info, err := AssetInfo(name) if err != nil { return err } err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) if err != nil { return err } err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) if err != nil { return err } err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) if err != nil { return err } return nil } // RestoreAssets restores an asset under the given directory recursively func RestoreAssets(dir, name string) error { children, err := AssetDir(name) // File if err != nil { return RestoreAsset(dir, name) } // Dir for _, child := range children { err = RestoreAssets(dir, filepath.Join(name, child)) if err != nil { return err } } return nil } func _filePath(dir, name string) string { cannonicalName := strings.Replace(name, "\\", "/", -1) return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...) }
{ return nil, err }
api_op_NiftyDisassociateNatTable.go
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. package computing import ( "context" "github.com/aokumasan/nifcloud-sdk-go-v2/internal/nifcloudutil" "github.com/aws/aws-sdk-go-v2/aws" ) type NiftyDisassociateNatTableInput struct { _ struct{} `type:"structure"` Agreement *bool `locationName:"Agreement" type:"boolean"` AssociationId *string `locationName:"AssociationId" type:"string"` } // String returns the string representation func (s NiftyDisassociateNatTableInput) String() string { return nifcloudutil.Prettify(s) } type NiftyDisassociateNatTableOutput struct { _ struct{} `type:"structure"` RequestId *string `locationName:"requestId" type:"string"` Return *bool `locationName:"return" type:"boolean"` } // String returns the string representation func (s NiftyDisassociateNatTableOutput) String() string { return nifcloudutil.Prettify(s) } const opNiftyDisassociateNatTable = "NiftyDisassociateNatTable" // NiftyDisassociateNatTableRequest returns a request value for making API operation for // NIFCLOUD Computing. // // // Example sending a request using NiftyDisassociateNatTableRequest. // req := client.NiftyDisassociateNatTableRequest(params) // resp, err := req.Send(context.TODO()) // if err == nil { // fmt.Println(resp) // } // // Please also see https://docs.aws.amazon.com/goto/WebAPI/computing-2016-11-15/NiftyDisassociateNatTable func (c *Client) NiftyDisassociateNatTableRequest(input *NiftyDisassociateNatTableInput) NiftyDisassociateNatTableRequest { op := &aws.Operation{ Name: opNiftyDisassociateNatTable, HTTPMethod: "POST", HTTPPath: "/api/", } if input == nil { input = &NiftyDisassociateNatTableInput{} } req := c.newRequest(op, input, &NiftyDisassociateNatTableOutput{}) return NiftyDisassociateNatTableRequest{Request: req, Input: input, Copy: c.NiftyDisassociateNatTableRequest} } // NiftyDisassociateNatTableRequest is the request type for the // NiftyDisassociateNatTable API operation. type NiftyDisassociateNatTableRequest struct { *aws.Request Input *NiftyDisassociateNatTableInput Copy func(*NiftyDisassociateNatTableInput) NiftyDisassociateNatTableRequest } // Send marshals and sends the NiftyDisassociateNatTable API request. func (r NiftyDisassociateNatTableRequest) Send(ctx context.Context) (*NiftyDisassociateNatTableResponse, error) { r.Request.SetContext(ctx) err := r.Request.Send() if err != nil { return nil, err } resp := &NiftyDisassociateNatTableResponse{ NiftyDisassociateNatTableOutput: r.Request.Data.(*NiftyDisassociateNatTableOutput), response: &aws.Response{Request: r.Request}, } return resp, nil } // NiftyDisassociateNatTableResponse is the response type for the // NiftyDisassociateNatTable API operation. type NiftyDisassociateNatTableResponse struct { *NiftyDisassociateNatTableOutput
} // SDKResponseMetdata returns the response metadata for the // NiftyDisassociateNatTable request. func (r *NiftyDisassociateNatTableResponse) SDKResponseMetdata() *aws.Response { return r.response }
response *aws.Response
ReviewsCard.js
import React, { Component, Fragment } from "react";
import "./ReviewsCard.css";

class ReviewsCard extends Component {
  renderReviews() {
    return this.props.reviews.map(review => {
      return (
        // the key must sit on the outermost element returned from map()
        <Fragment key={review._id}>
          <div className="review__card" data-status={review.status}>
            {/* data-status is to know if the product is pending, approved, or denied */}
            <div className="product__info">
              <a href={review.product_url} className="product__img">
                {/* <img src={this.props.products.avatar} alt="ECOMRUSH" /> */}
                <img src="http://placehold.it/200x200" alt="ECOMRUSH" />
              </a>
              <div className="product__title">
                {/* <h6>{review.product_name}</h6> */}
                <h6>Adidas NMD Here White</h6>
                <div className="customer__rating">
</div> <div className="customer__name"> {review.responses[0].answer} </div> </div> </div> <div className="product__review"> <p>{review.responses[2].answer}</p> <p className="cs__response">Customer Service Response: {review.response}</p> </div> <div className="review__buttons"> <a>REPLY</a> <button className="review__btn blue" value="approved"> <span role="img" aria-label="thumbs up"> 👍🏼 </span> </button> <button className="review__btn red" value="denied"> <span role="img" aria-label="prohibited"> 🚫 </span> </button> </div> </div> </Fragment> ); }); } render() { return <div>{this.renderReviews()}</div>; } } export default ReviewsCard;
{review.responses[1].answer} Stars
test_drag.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""Test drag calibration experiment."""

from test.base import QiskitExperimentsTestCase
import unittest
import numpy as np

from qiskit.circuit import Parameter
from qiskit.exceptions import QiskitError
from qiskit.pulse import DriveChannel, Drag
import qiskit.pulse as pulse
from qiskit.qobj.utils import MeasLevel
from qiskit import transpile

from qiskit_experiments.exceptions import CalibrationError
from qiskit_experiments.library import RoughDrag, RoughDragCal
from qiskit_experiments.test.mock_iq_backend import DragBackend
from qiskit_experiments.calibration_management.basis_gate_library import FixedFrequencyTransmon
from qiskit_experiments.calibration_management import Calibrations


class TestDragEndToEnd(QiskitExperimentsTestCase):
    """Test the drag experiment."""

    def setUp(self):
        """Setup some schedules."""
        super().setUp()

        beta = Parameter("β")

        with pulse.build(name="xp") as xp:
            pulse.play(Drag(duration=160, amp=0.208519, sigma=40, beta=beta), DriveChannel(0))

        self.x_plus = xp
        self.test_tol = 0.05

    def test_reps(self):
        """Test that setting reps raises an error if reps is not of length three."""

        drag = RoughDrag(0, self.x_plus)

        with self.assertRaises(CalibrationError):
            drag.set_experiment_options(reps=[1, 2, 3, 4])

    def test_end_to_end(self):
        """Test the drag experiment end to end."""

        backend = DragBackend(gate_name="Drag(xp)")

        drag = RoughDrag(1, self.x_plus)

        expdata = drag.run(backend)
        self.assertExperimentDone(expdata)
        result = expdata.analysis_results(1)

        self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol)
        self.assertEqual(result.quality, "good")

        # Small leakage will make the curves very flat, in this case one should
        # rather increase beta.
        backend = DragBackend(error=0.0051, gate_name="Drag(xp)")

        drag = RoughDrag(0, self.x_plus)
        drag.analysis.set_options(p0={"beta": 1.2})

        exp_data = drag.run(backend)
        self.assertExperimentDone(exp_data)
        result = exp_data.analysis_results(1)

        self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol)
        self.assertEqual(result.quality, "good")

        # Large leakage will make the curves oscillate quickly.
backend = DragBackend(error=0.05, gate_name="Drag(xp)") drag = RoughDrag(1, self.x_plus, betas=np.linspace(-4, 4, 31)) drag.set_run_options(shots=200) drag.analysis.set_options(p0={"beta": 1.8, "freq0": 0.08, "freq1": 0.16, "freq2": 0.32}) exp_data = drag.run(backend) self.assertExperimentDone(exp_data) result = exp_data.analysis_results(1) meas_level = exp_data.metadata["job_metadata"][-1]["run_options"]["meas_level"] self.assertEqual(meas_level, MeasLevel.CLASSIFIED) self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol) self.assertEqual(result.quality, "good") class TestDragCircuits(QiskitExperimentsTestCase): """Test the circuits of the drag calibration.""" def setUp(self): """Setup some schedules.""" super().setUp() beta = Parameter("β") with pulse.build(name="xp") as xp: pulse.play(Drag(duration=160, amp=0.208519, sigma=40, beta=beta), DriveChannel(0)) self.x_plus = xp def test_default_circuits(self): """Test the default circuit.""" backend = DragBackend(error=0.005, gate_name="Drag(xp)") drag = RoughDrag(0, self.x_plus) drag.set_experiment_options(reps=[2, 4, 8]) drag.backend = DragBackend(gate_name="Drag(xp)") circuits = drag.circuits() for idx, expected in enumerate([4, 8, 16]): ops = transpile(circuits[idx * 51], backend).count_ops() self.assertEqual(ops["Drag(xp)"], expected) def te
elf): """Check that the experiment raises with unassigned parameters.""" beta = Parameter("β") amp = Parameter("amp") with pulse.build(name="xp") as xp: pulse.play(Drag(duration=160, amp=amp, sigma=40, beta=beta), DriveChannel(0)) with self.assertRaises(QiskitError): RoughDrag(1, xp, betas=np.linspace(-3, 3, 21)) class TestRoughDragCalUpdate(QiskitExperimentsTestCase): """Test that a Drag calibration experiment properly updates the calibrations.""" def setUp(self): """Setup the tests""" super().setUp() library = FixedFrequencyTransmon() self.backend = DragBackend(gate_name="Drag(x)") self.cals = Calibrations.from_backend(self.backend, library) self.test_tol = 0.05 def test_update(self): """Test that running RoughDragCal updates the calibrations.""" qubit = 0 prev_beta = self.cals.get_parameter_value("β", (0,), "x") self.assertEqual(prev_beta, 0) expdata = RoughDragCal(qubit, self.cals, backend=self.backend).run() self.assertExperimentDone(expdata) new_beta = self.cals.get_parameter_value("β", (0,), "x") self.assertTrue(abs(new_beta - self.backend.ideal_beta) < self.test_tol) self.assertTrue(abs(new_beta) > self.test_tol) def test_dragcal_experiment_config(self): """Test RoughDragCal config can round trip""" exp = RoughDragCal(0, self.cals, backend=self.backend) loaded_exp = RoughDragCal.from_config(exp.config()) self.assertNotEqual(exp, loaded_exp) self.assertTrue(self.json_equiv(exp, loaded_exp)) @unittest.skip("Calibration experiments are not yet JSON serializable") def test_dragcal_roundtrip_serializable(self): """Test round trip JSON serialization""" exp = RoughDragCal(0, self.cals) self.assertRoundTripSerializable(exp, self.json_equiv) def test_drag_experiment_config(self): """Test RoughDrag config can roundtrip""" with pulse.build(name="xp") as sched: pulse.play(pulse.Drag(160, 0.5, 40, Parameter("β")), pulse.DriveChannel(0)) exp = RoughDrag(0, backend=self.backend, schedule=sched) loaded_exp = RoughDrag.from_config(exp.config()) self.assertNotEqual(exp, loaded_exp) self.assertTrue(self.json_equiv(exp, loaded_exp)) @unittest.skip("Schedules are not yet JSON serializable") def test_drag_roundtrip_serializable(self): """Test round trip JSON serialization""" with pulse.build(name="xp") as sched: pulse.play(pulse.Drag(160, 0.5, 40, Parameter("β")), pulse.DriveChannel(0)) exp = RoughDrag(0, backend=self.backend, schedule=sched) self.assertRoundTripSerializable(exp, self.json_equiv)
st_raise_multiple_parameter(s
image.rs
use super::{
    context::GraphicsContext,
    gpu::arc::{ArcTexture, ArcTextureView},
    Canvas, Color, Draw, DrawParam, Drawable, Rect, WgpuContext,
};
use crate::{
    context::{Has, HasMut, HasTwo},
    filesystem::Filesystem,
    Context, GameError, GameResult,
};
use image::ImageEncoder;
use std::path::Path;
use std::{io::Read, num::NonZeroU32};

// maintaining a massive enum of all possible texture formats?
// screw that.
/// Describes the pixel format of an image.
pub type ImageFormat = wgpu::TextureFormat;

/// Describes the format of an encoded image.
pub type ImageEncodingFormat = ::image::ImageFormat;

/// Handle to an image stored in GPU memory.
#[derive(Debug, Clone)]
pub struct Image {
    pub(crate) texture: ArcTexture,
    pub(crate) view: ArcTextureView,
    pub(crate) format: ImageFormat,
    pub(crate) width: u32,
    pub(crate) height: u32,
    pub(crate) samples: u32,
}

impl Image {
    /// Creates a new image specifically for use with a [Canvas](crate::graphics::Canvas).
    pub fn new_canvas_image(
        gfx: &impl Has<GraphicsContext>,
        format: ImageFormat,
        width: u32,
        height: u32,
        samples: u32,
    ) -> Self
/// Creates a new image initialized with given pixel data. pub fn from_pixels( gfx: &impl Has<GraphicsContext>, pixels: &[u8], format: ImageFormat, width: u32, height: u32, ) -> Self { let gfx = gfx.retrieve(); Self::from_pixels_wgpu(&gfx.wgpu, pixels, format, width, height) } pub(crate) fn from_pixels_wgpu( wgpu: &WgpuContext, pixels: &[u8], format: ImageFormat, width: u32, height: u32, ) -> Self { let image = Self::new( wgpu, format, width, height, 1, wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_SRC, ); wgpu.queue.write_texture( image.texture.as_image_copy(), pixels, wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some( NonZeroU32::new(format.describe().block_size as u32 * width).unwrap(), ), rows_per_image: None, }, wgpu::Extent3d { width, height, depth_or_array_layers: 1, }, ); image } /// A little helper function that creates a new `Image` that is just a solid square of the given size and color. Mainly useful for debugging. pub fn from_solid(gfx: &impl Has<GraphicsContext>, size: u32, color: Color) -> Self { let pixels = (0..(size * size)) .flat_map(|_| { let (r, g, b, a) = color.to_rgba(); [r, g, b, a] }) .collect::<Vec<_>>(); Self::from_pixels( gfx, &pixels, wgpu::TextureFormat::Rgba8UnormSrgb, size, size, ) } /// Creates a new image initialized with pixel data loaded from an encoded image `Read` (e.g. PNG or JPEG). #[allow(unused_results)] pub fn from_path( ctxs: &impl HasTwo<Filesystem, GraphicsContext>, path: impl AsRef<Path>, srgb: bool, ) -> GameResult<Self> { let fs = ctxs.retrieve_first(); let gfx = ctxs.retrieve_second(); let mut encoded = Vec::new(); fs.open(path)?.read_to_end(&mut encoded)?; let decoded = image::load_from_memory(&encoded[..]) .map_err(|_| GameError::ResourceLoadError(String::from("failed to load image")))?; let rgba8 = decoded.to_rgba8(); let (width, height) = (rgba8.width(), rgba8.height()); Ok(Self::from_pixels( gfx, rgba8.as_ref(), if srgb { ImageFormat::Rgba8UnormSrgb } else { ImageFormat::Rgba8Unorm }, width, height, )) } fn new( wgpu: &WgpuContext, format: ImageFormat, width: u32, height: u32, samples: u32, usage: wgpu::TextureUsages, ) -> Self { assert!(width > 0); assert!(height > 0); assert!(samples > 0); let texture = ArcTexture::new(wgpu.device.create_texture(&wgpu::TextureDescriptor { label: None, size: wgpu::Extent3d { width, height, depth_or_array_layers: 1, }, mip_level_count: 1, sample_count: samples, dimension: wgpu::TextureDimension::D2, format, usage, })); let view = ArcTextureView::new(texture.as_ref().create_view(&wgpu::TextureViewDescriptor { label: None, format: Some(format), dimension: Some(wgpu::TextureViewDimension::D2), aspect: wgpu::TextureAspect::All, base_mip_level: 0, mip_level_count: Some(NonZeroU32::new(1).unwrap()), base_array_layer: 0, array_layer_count: Some(NonZeroU32::new(1).unwrap()), })); Image { texture, view, format, width, height, samples, } } /// Returns the underlying [`wgpu::Texture`] and [`wgpu::TextureView`] for this [`Image`]. #[inline] pub fn wgpu(&self) -> (&wgpu::Texture, &wgpu::TextureView) { (&self.texture, &self.view) } /// Reads the pixels of this `ImageView` and returns as `Vec<u8>`. /// The format matches the GPU image format. 
/// /// **This is a very expensive operation - call sparingly.** pub fn to_pixels(&self, gfx: &impl Has<GraphicsContext>) -> GameResult<Vec<u8>> { let gfx = gfx.retrieve(); if self.samples > 1 { return Err(GameError::RenderError(String::from( "cannot read the pixels of a multisampled image; resolve this image with a canvas", ))); } let block_size = self.format.describe().block_size as u64; let buffer = gfx.wgpu.device.create_buffer(&wgpu::BufferDescriptor { label: None, size: block_size * self.width as u64 * self.height as u64, usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ, mapped_at_creation: false, }); let cmd = { let mut encoder = gfx .wgpu .device .create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None }); encoder.copy_texture_to_buffer( self.texture.as_image_copy(), wgpu::ImageCopyBuffer { buffer: &buffer, layout: wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some( NonZeroU32::new(block_size as u32 * self.width).unwrap(), ), rows_per_image: None, }, }, wgpu::Extent3d { width: self.width, height: self.height, depth_or_array_layers: 1, }, ); encoder.finish() }; gfx.wgpu.queue.submit([cmd]); // wait... let fut = buffer.slice(..).map_async(wgpu::MapMode::Read); gfx.wgpu.device.poll(wgpu::Maintain::Wait); pollster::block_on(fut)?; let out = buffer.slice(..).get_mapped_range().to_vec(); Ok(out) } /// Encodes the `ImageView` to the given file format and return the encoded bytes. /// /// **This is a very expensive operation - call sparingly.** pub fn encode( &self, ctx: &Context, format: ImageEncodingFormat, path: impl AsRef<std::path::Path>, ) -> GameResult { let color = match self.format { ImageFormat::Rgba8Unorm | ImageFormat::Rgba8UnormSrgb => ::image::ColorType::Rgba8, ImageFormat::R8Unorm => ::image::ColorType::L8, ImageFormat::R16Unorm => ::image::ColorType::L16, format => { return Err(GameError::RenderError(format!( "cannot ImageView::encode for the {:#?} GPU image format", format ))) } }; let pixels = self.to_pixels(ctx)?; let f = ctx.fs.create(path)?; let writer = &mut std::io::BufWriter::new(f); match format { ImageEncodingFormat::Png => ::image::codecs::png::PngEncoder::new(writer) .write_image(&pixels, self.width, self.height, color) .map_err(Into::into), ImageEncodingFormat::Bmp => ::image::codecs::bmp::BmpEncoder::new(writer) .encode(&pixels, self.width, self.height, color) .map_err(Into::into), _ => Err(GameError::RenderError(String::from( "cannot ImageView::encode for formats other than Png and Bmp", ))), } } /// Returns the image format of this image. #[inline] pub fn format(&self) -> ImageFormat { self.format } /// Returns the number of MSAA samples this image has. #[inline] pub fn samples(&self) -> u32 { self.samples } /// Returns the width (in pixels) of the image. #[inline] pub fn width(&self) -> u32 { self.width } /// Returns the height (in pixels) of the image. #[inline] pub fn height(&self) -> u32 { self.height } /// Helper function that calculates a sub-rectangle of this image in UV coordinates, given pixel coordinates. 
pub fn uv_rect(&self, x: u32, y: u32, w: u32, h: u32) -> Rect { Rect { x: x as f32 / self.width as f32, y: y as f32 / self.height as f32, w: w as f32 / self.width as f32, h: h as f32 / self.height as f32, } } } impl Drawable for Image { fn draw(&self, canvas: &mut Canvas, param: DrawParam) { canvas.push_draw( Draw::Mesh { mesh: canvas.default_resources().mesh.clone(), image: self.clone(), }, param, ); } fn dimensions(&self, _gfx: &mut impl HasMut<GraphicsContext>) -> Option<Rect> { Some(Rect { x: 0., y: 0., w: self.width() as _, h: self.height() as _, }) } } /// An image which is sized relative to the screen. /// This is primarily for canvas images. #[derive(Debug, Clone)] pub struct ScreenImage { image: Image, format: wgpu::TextureFormat, size: (f32, f32), samples: u32, } impl ScreenImage { /// Creates a new [ScreenImage] with the given parameters. /// /// `width` and `height` specify the fraction of the framebuffer width and height that the [Image] will have. /// For example, `width = 1.0` and `height = 1.0` means the image will be the same size as the framebuffer. /// /// If `format` is `None` then the format will be inferred from the surface format. pub fn new( gfx: &impl Has<GraphicsContext>, format: impl Into<Option<ImageFormat>>, width: f32, height: f32, samples: u32, ) -> Self { let gfx = gfx.retrieve(); assert!(width > 0.); assert!(height > 0.); assert!(samples > 0); let format = format.into().unwrap_or(gfx.surface_format); ScreenImage { image: Self::create(gfx, format, (width, height), samples), format, size: (width, height), samples, } } /// Returns the inner [Image], also recreating it if the framebuffer has been resized. pub fn image(&mut self, gfx: &impl Has<GraphicsContext>) -> Image { if Self::size(gfx, self.size) != (self.image.width(), self.image.height()) { self.image = Self::create(gfx, self.format, self.size, self.samples); } self.image.clone() } fn size(gfx: &impl Has<GraphicsContext>, (width, height): (f32, f32)) -> (u32, u32) { let gfx = gfx.retrieve(); let size = gfx.window.inner_size(); let width = (size.width as f32 * width) as u32; let height = (size.height as f32 * height) as u32; (width.max(1), height.max(1)) } fn create( gfx: &impl Has<GraphicsContext>, format: wgpu::TextureFormat, size: (f32, f32), samples: u32, ) -> Image { let (width, height) = Self::size(gfx, size); Image::new_canvas_image(gfx, format, width, height, samples) } }
{ let gfx = gfx.retrieve(); Self::new( &gfx.wgpu, format, width, height, samples, wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_SRC, ) }
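A short usage sketch of the readback path described above; `ctx` is an assumed ggez-style `Context` whose `gfx` field satisfies `Has<GraphicsContext>`:

// Hypothetical usage sketch (ctx is an assumed Context; not part of this file).
let img = Image::from_solid(&ctx.gfx, 4, Color::WHITE);
let pixels = img.to_pixels(&ctx.gfx)?;    // expensive GPU round trip
assert_eq!(pixels.len(), 4 * 4 * 4);      // 4x4 Rgba8UnormSrgb, 4 bytes/pixel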
test_scaling_policy.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib import decorators

from senlin_tempest_plugin.common import constants
from senlin_tempest_plugin.common import utils
from senlin_tempest_plugin.functional import base


class TestScalingPolicy(base.BaseSenlinFunctionalTest):

    def setUp(self):
        super(TestScalingPolicy, self).setUp()
        self.profile_id = utils.create_a_profile(self)
        self.addCleanup(utils.delete_a_profile, self, self.profile_id)
        self.cluster_id = utils.create_a_cluster(self, self.profile_id,
                                                 min_size=0, max_size=5,
                                                 desired_capacity=1)
        self.addCleanup(utils.delete_a_cluster, self, self.cluster_id)

    @decorators.attr(type=['functional'])
    @decorators.idempotent_id('6b513a5d-75b6-447a-b95d-e17b84ac9ee8')
    def test_scaling_policy(self):
        # Create a scaling policy that targets the CLUSTER_SCALE_OUT action
        spec = constants.spec_scaling_policy
        spec['properties'] = {
            'event': 'CLUSTER_SCALE_OUT',
            'adjustment': {
                'type': 'CHANGE_IN_CAPACITY',
                'number': 2,
                'min_step': 1,
                'best_effort': True
            }
        }
        policy_id = utils.create_a_policy(self, spec)
        scaleout_policy = utils.get_a_policy(self, policy_id)
        self.addCleanup(utils.delete_a_policy, self, scaleout_policy['id'])

        # Create a scaling policy that targets the CLUSTER_SCALE_IN action
        spec['properties'] = {
            'event': 'CLUSTER_SCALE_IN',
            'adjustment': {
                'type': 'CHANGE_IN_PERCENTAGE',
                'number': 50,
                'min_step': 2,
                'best_effort': False
            }
        }
        policy_id = utils.create_a_policy(self, spec)
        scalein_policy = utils.get_a_policy(self, policy_id)
        self.addCleanup(utils.delete_a_policy, self, scalein_policy['id'])

        # Attach scale in/out policies to cluster
        for policy in [scaleout_policy, scalein_policy]:
            utils.cluster_attach_policy(self, self.cluster_id, policy['id'])
            self.addCleanup(utils.cluster_detach_policy, self,
                            self.cluster_id, policy['id'])

        # Scale out cluster without count specified
        utils.cluster_scale_out(self, self.cluster_id)

        # Verify scale out result
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(3, cluster['desired_capacity'])
        self.assertEqual(3, len(cluster['nodes']))

        # Scale out cluster with count set to 1
        utils.cluster_scale_out(self, self.cluster_id, count=1)

        # Verify scale out result
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(4, cluster['desired_capacity'])
        self.assertEqual(4, len(cluster['nodes']))

        # Keep scaling out cluster with count set to 2 to
        # verify best_effort parameter
        utils.cluster_scale_out(self, self.cluster_id, count=2)

        # Verify scale out result
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(5, cluster['desired_capacity'])
        self.assertEqual(5, len(cluster['nodes']))

        # Scale in cluster without count specified
        utils.cluster_scale_in(self, self.cluster_id)

        # Verify scale in result
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(3, cluster['desired_capacity'])
        self.assertEqual(3, len(cluster['nodes']))

        # Scale in cluster without count specified to
        # verify min_step parameter
        utils.cluster_scale_in(self, self.cluster_id)

        # Verify scale in result
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(1, cluster['desired_capacity'])
        self.assertEqual(1, len(cluster['nodes']))

        # Keep scaling in cluster with count set to 2 to
        # verify best_effort parameter
        res = utils.cluster_scale_in(self, self.cluster_id, count=2,
                                     expected_status='FAILED')

        # Verify action result and action failure reason
        cluster = utils.get_a_cluster(self, self.cluster_id)
        self.assertEqual('ACTIVE', cluster['status'])
        self.assertEqual(1, cluster['desired_capacity'])
        self.assertEqual(1, len(cluster['nodes']))
        reason = ("Policy check failure: Failed policy '%s': The target "
                  "capacity (-1) is less than the cluster's "
                  "min_size (0).") % scalein_policy['name']
        self.assertEqual(reason, res)
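The scale-in assertions hinge on the adjustment arithmetic; a hedged sketch of that arithmetic under the semantics the comments assume (not Senlin's actual implementation):

# Assumed scale-in math: a 50% CHANGE_IN_PERCENTAGE of 3 nodes is 1.5,
# and min_step=2 raises the removal count to 2, leaving 1 node.
desired, percentage, min_step = 3, 50, 2
delta = desired * percentage / 100      # 1.5 nodes
count = max(round(delta), min_step)     # min_step enforces at least 2
print(desired - count)                  # 1, matching the assertions above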
integration_test.rs
/* Rusty Backend * This test will demonstrate an example of how to use this library. * It also verifies that the library can be used as intended. * Author: Käthe Specht * Date: 2021-09-01 */ use std::time::Duration; use rustract::{error::RustractError, init}; use tokio::time::timeout; use warp::Filter; /// Uses the rusty backend library to generate a backend based on an example database. #[tokio::test] async fn main() -> Result<(), RustractError> { // Test this library's config integration let _db = init("./tests/example_config.json", true)?; // Create a future from the warp_test function let future = warp_test(); // Wrap the future with a `Timeout` set to expire. let result = timeout(Duration::from_millis(500), future).await; // This seems odd, but if the warp test fails the server should have exited before the timeout assert!(result.is_err()); // std::fs::remove_file("./tests/example_database.json").unwrap(); Ok(()) } /// Tests the warp library. /// /// TODO: Add a client test to this that panics on failure! async fn warp_test() -> Result<(), RustractError> {
let hello_world = warp::path::end().map(|| "Hello, World at root!"); let numb = warp::path!(u16).map(|a| format!("{}", a)); let path = warp::path("hello").and(numb); let routes = warp::get() .and(hello_world .or(path) ); warp::serve(routes).run(([127, 0, 0, 1], 3030)).await; Ok(()) }
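A hedged sketch of the client check the TODO asks for, assuming the `reqwest` crate as a dev-dependency and a server already bound to 127.0.0.1:3030:

// Hypothetical client-side assertion (reqwest is an assumed dependency).
let body = reqwest::get("http://127.0.0.1:3030/").await?.text().await?;
assert_eq!(body, "Hello, World at root!");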
variable_assignment.py
# coding: utf-8 """ Influx API Service No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 OpenAPI spec version: 0.1.0 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six class VariableAssignment(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'type': 'str', 'id': 'Identifier', 'init': 'Expression' } attribute_map = { 'type': 'type', 'id': 'id', 'init': 'init' } def __init__(self, type=None, id=None, init=None): # noqa: E501 """VariableAssignment - a model defined in OpenAPI""" # noqa: E501 self._type = None self._id = None self._init = None self.discriminator = None if type is not None: self.type = type if id is not None: self.id = id if init is not None: self.init = init @property def type(self): """Gets the type of this VariableAssignment. # noqa: E501 type of AST node # noqa: E501 :return: The type of this VariableAssignment. # noqa: E501 :rtype: str """ return self._type @type.setter def type(self, type): """Sets the type of this VariableAssignment. type of AST node # noqa: E501 :param type: The type of this VariableAssignment. # noqa: E501 :type: str """ self._type = type @property def id(self): """Gets the id of this VariableAssignment. # noqa: E501 :return: The id of this VariableAssignment. # noqa: E501 :rtype: Identifier """ return self._id @id.setter def id(self, id): """Sets the id of this VariableAssignment. :param id: The id of this VariableAssignment. # noqa: E501 :type: Identifier """ self._id = id @property def init(self):
@init.setter def init(self, init): """Sets the init of this VariableAssignment. :param init: The init of this VariableAssignment. # noqa: E501 :type: Expression """ self._init = init def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, VariableAssignment): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
"""Gets the init of this VariableAssignment. # noqa: E501 :return: The init of this VariableAssignment. # noqa: E501 :rtype: Expression """ return self._init
Goods.ts
import {Column, Entity, PrimaryGeneratedColumn} from 'typeorm';

/**
 * Goods (product)
 */
@Entity('goods', {schema: 'itemall'})
export class Goods {
  @PrimaryGeneratedColumn({type: 'int', name: 'id'})
  id: number;

  @Column('varchar', {name: 'cfav', nullable: true, length: 255})
  cfav: string | null;

  @Column('varchar', {name: 'clientUrl', nullable: true, length: 1000})
  clientUrl: string | null;

  @Column('varchar', {name: 'cparam', nullable: true, length: 255})
  cparam: string | null;

  @Column('varchar', {name: 'iid', nullable: true, length: 255})
  iid: string | null;

  @Column('varchar', {name: 'itemMarks', nullable: true, length: 255})
  itemMarks: string | null;

  @Column('varchar', {name: 'itemType', nullable: true, length: 255})
  itemType: string | null;
name: 'leftbottom_taglist', nullable: true, length: 255, }) leftbottomTaglist: string | null; @Column('varchar', {name: 'lefttop_taglist', nullable: true, length: 255}) lefttopTaglist: string | null; @Column('varchar', {name: 'link', nullable: true, length: 1000}) link: string | null; @Column('varchar', {name: 'orgPrice', nullable: true, length: 255}) orgPrice: string | null; @Column('varchar', {name: 'popularStar', nullable: true, length: 255}) popularStar: string | null; @Column({type: 'double', name: 'price', nullable: true}) price: number; @Column('varchar', {name: 'props', nullable: true, length: 255}) props: string | null; @Column('varchar', {name: 'ptpC', nullable: true, length: 255}) ptpC: string | null; @Column('varchar', {name: 'sale', nullable: true, length: 255}) sale: string | null; @Column('varchar', {name: 'shopId', nullable: true, length: 255}) shopId: string | null; @Column('varchar', {name: 'show', nullable: true, length: 255}) show: string | null; @Column('varchar', {name: 'showLarge', nullable: true, length: 255}) showLarge: string | null; @Column('varchar', {name: 'title', nullable: true, length: 255}) title: string | null; @Column('varchar', {name: 'titleTags', nullable: true, length: 255}) titleTags: string | null; @Column('varchar', {name: 'type', nullable: true, length: 255}) type: string | null; }
@Column('varchar', {
content_renderer.go
package helpers

import (
	"bytes"
	"html"

	"github.com/miekg/mmark"
	"github.com/russross/blackfriday"
	"github.com/spf13/viper"
)

// HugoHtmlRenderer wraps a blackfriday.Renderer, typically a blackfriday.Html,
// enabling Hugo to customise the rendering experience.
type HugoHtmlRenderer struct {
	blackfriday.Renderer
}

func (renderer *HugoHtmlRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string) {
	if viper.GetBool("PygmentsCodeFences")
	else {
		renderer.Renderer.BlockCode(out, text, lang)
	}
}

// HugoMmarkHtmlRenderer wraps a mmark.Renderer, typically a mmark.html,
// enabling Hugo to customise the rendering experience.
type HugoMmarkHtmlRenderer struct {
	mmark.Renderer
}

func (renderer *HugoMmarkHtmlRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string, caption []byte, subfigure bool, callouts bool) {
	if viper.GetBool("PygmentsCodeFences") {
		str := html.UnescapeString(string(text))
		out.WriteString(Highlight(str, lang, ""))
	} else {
		renderer.Renderer.BlockCode(out, text, lang, caption, subfigure, callouts)
	}
}
{ opts := viper.GetString("PygmentsOptions") str := html.UnescapeString(string(text)) out.WriteString(Highlight(str, lang, opts)) }
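A hedged wiring sketch for the wrapper above, assuming blackfriday v1's `HtmlRenderer`/`Markdown` API and an `input` byte slice of markdown:

// Hypothetical usage: route markdown through the Hugo wrapper so fenced
// code blocks take the Pygments path when enabled.
base := blackfriday.HtmlRenderer(blackfriday.HTML_USE_XHTML, "", "")
renderer := &HugoHtmlRenderer{Renderer: base}
out := blackfriday.Markdown(input, renderer, blackfriday.EXTENSION_FENCED_CODE)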
transform_suite_test.go
package transform_test import (
"github.com/tidepool-org/platform/test" ) func TestSuite(t *testing.T) { test.Test(t) }
"testing"
bridge.go
package bridge

import "fmt"

/*
The Bridge pattern decouples an abstraction from its implementation so that
the two can be extended independently.

Bridge resembles the Strategy pattern, but they differ: Strategy encapsulates
a family of interchangeable algorithms, while Bridge separates the abstraction
from the implementation so each can vary on its own.
*/

type AbstractMessage interface {
	SendMessage(text, to string)
}

type MessageImplementer interface {
	Send(text, to string)
}

type SMS struct {
}

func ViaSMS() MessageImplementer {
	return &SMS{}
}

func (*SMS) Send(msg, to string) {
	fmt.Printf("Send msg %v to %v via SMS \n", msg, to)
}

type Email struct {
}

func ViaEmail() MessageImplementer {
	return &Email{}
}

func (*Email) Send(msg, to string) {
	fmt.Printf("Send msg %v to %v via Email \n", msg, to)
}
func NewCommonMessage(via MessageImplementer) *CommonMessage {
	return &CommonMessage{via: via}
}

func (m *CommonMessage) SendMessage(msg, to string) {
	m.via.Send(msg, to)
}

type Urgency struct {
	via MessageImplementer
}

func NewUrgencyMessage(via MessageImplementer) *Urgency {
	return &Urgency{via: via}
}

func (m *Urgency) SendMessage(msg, to string) {
	m.via.Send(msg, to)
}
type CommonMessage struct { via MessageImplementer }
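A short usage sketch (a hypothetical caller, not part of the package) showing the two dimensions varying independently:

// Any message abstraction pairs with any transport implementer.
m := NewCommonMessage(ViaSMS())
m.SendMessage("meeting at 10", "alice")
u := NewUrgencyMessage(ViaEmail())
u.SendMessage("server down", "bob")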