file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
sigmoid_function.py
import numpy as np import matplotlib.pyplot as plt def func(x): return 1 / (1 + np.exp(-x)) # Return evenly spaced numbers over a specified interval. xdata = np.linspace(-8, 8, 960,endpoint=True) ydata = func(xdata) plt.plot(xdata,ydata)
plt.show()
net_cdf_timeseries_rain.py
# coding: utf-8 """ 3Di API 3Di simulation API (latest version: 3.0) Framework release: 1.0.16 3Di core release: 2.0.11 deployed on: 07:33AM (UTC) on September 04, 2020 # noqa: E501 The version of the OpenAPI document: 3.0 Contact: [email protected] Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from openapi_client.configuration import Configuration class NetCDFTimeseriesRain(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'url': 'str', 'multiplier': 'float', 'simulation': 'str', 'offset': 'int', 'duration': 'int', 'timestamps': 'list[int]', 'interval': 'int', 'values_reference': 'str', 'fill_value': 'str', 'units': 'str', 'file': 'FileReadOnly', 'uid': 'str' } attribute_map = { 'url': 'url', 'multiplier': 'multiplier', 'simulation': 'simulation', 'offset': 'offset', 'duration': 'duration', 'timestamps': 'timestamps', 'interval': 'interval', 'values_reference': 'values_reference', 'fill_value': 'fill_value', 'units': 'units', 'file': 'file', 'uid': 'uid' } def __init__(self, url=None, multiplier=None, simulation=None, offset=None, duration=None, timestamps=None, interval=None, values_reference=None, fill_value=None, units=None, file=None, uid=None, local_vars_configuration=None): # noqa: E501 """NetCDFTimeseriesRain - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._url = None self._multiplier = None self._simulation = None self._offset = None self._duration = None self._timestamps = None self._interval = None self._values_reference = None self._fill_value = None self._units = None 
self._file = None self._uid = None self.discriminator = None if url is not None: self.url = url if multiplier is not None: self.multiplier = multiplier if simulation is not None: self.simulation = simulation self.offset = offset self.duration = duration self.timestamps = timestamps self.interval = interval self.values_reference = values_reference if fill_value is not None: self.fill_value = fill_value self.units = units if file is not None: self.file = file if uid is not None: self.uid = uid @property def
(self): """Gets the url of this NetCDFTimeseriesRain. # noqa: E501 :return: The url of this NetCDFTimeseriesRain. # noqa: E501 :rtype: str """ return self._url @url.setter def url(self, url): """Sets the url of this NetCDFTimeseriesRain. :param url: The url of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ self._url = url @property def multiplier(self): """Gets the multiplier of this NetCDFTimeseriesRain. # noqa: E501 :return: The multiplier of this NetCDFTimeseriesRain. # noqa: E501 :rtype: float """ return self._multiplier @multiplier.setter def multiplier(self, multiplier): """Sets the multiplier of this NetCDFTimeseriesRain. :param multiplier: The multiplier of this NetCDFTimeseriesRain. # noqa: E501 :type: float """ self._multiplier = multiplier @property def simulation(self): """Gets the simulation of this NetCDFTimeseriesRain. # noqa: E501 :return: The simulation of this NetCDFTimeseriesRain. # noqa: E501 :rtype: str """ return self._simulation @simulation.setter def simulation(self, simulation): """Sets the simulation of this NetCDFTimeseriesRain. :param simulation: The simulation of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ self._simulation = simulation @property def offset(self): """Gets the offset of this NetCDFTimeseriesRain. # noqa: E501 offset of event in simulation in seconds # noqa: E501 :return: The offset of this NetCDFTimeseriesRain. # noqa: E501 :rtype: int """ return self._offset @offset.setter def offset(self, offset): """Sets the offset of this NetCDFTimeseriesRain. offset of event in simulation in seconds # noqa: E501 :param offset: The offset of this NetCDFTimeseriesRain. 
# noqa: E501 :type: int """ if (self.local_vars_configuration.client_side_validation and offset is not None and offset > 2147483647): # noqa: E501 raise ValueError("Invalid value for `offset`, must be a value less than or equal to `2147483647`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and offset is not None and offset < -2147483648): # noqa: E501 raise ValueError("Invalid value for `offset`, must be a value greater than or equal to `-2147483648`") # noqa: E501 self._offset = offset @property def duration(self): """Gets the duration of this NetCDFTimeseriesRain. # noqa: E501 Duration of event in seconds # noqa: E501 :return: The duration of this NetCDFTimeseriesRain. # noqa: E501 :rtype: int """ return self._duration @duration.setter def duration(self, duration): """Sets the duration of this NetCDFTimeseriesRain. Duration of event in seconds # noqa: E501 :param duration: The duration of this NetCDFTimeseriesRain. # noqa: E501 :type: int """ if (self.local_vars_configuration.client_side_validation and duration is not None and duration > 2147483647): # noqa: E501 raise ValueError("Invalid value for `duration`, must be a value less than or equal to `2147483647`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and duration is not None and duration < -2147483648): # noqa: E501 raise ValueError("Invalid value for `duration`, must be a value greater than or equal to `-2147483648`") # noqa: E501 self._duration = duration @property def timestamps(self): """Gets the timestamps of this NetCDFTimeseriesRain. # noqa: E501 in simulation in seconds # noqa: E501 :return: The timestamps of this NetCDFTimeseriesRain. # noqa: E501 :rtype: list[int] """ return self._timestamps @timestamps.setter def timestamps(self, timestamps): """Sets the timestamps of this NetCDFTimeseriesRain. in simulation in seconds # noqa: E501 :param timestamps: The timestamps of this NetCDFTimeseriesRain. 
# noqa: E501 :type: list[int] """ self._timestamps = timestamps @property def interval(self): """Gets the interval of this NetCDFTimeseriesRain. # noqa: E501 interval in seconds # noqa: E501 :return: The interval of this NetCDFTimeseriesRain. # noqa: E501 :rtype: int """ return self._interval @interval.setter def interval(self, interval): """Sets the interval of this NetCDFTimeseriesRain. interval in seconds # noqa: E501 :param interval: The interval of this NetCDFTimeseriesRain. # noqa: E501 :type: int """ if (self.local_vars_configuration.client_side_validation and interval is not None and interval > 2147483647): # noqa: E501 raise ValueError("Invalid value for `interval`, must be a value less than or equal to `2147483647`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and interval is not None and interval < 0): # noqa: E501 raise ValueError("Invalid value for `interval`, must be a value greater than or equal to `0`") # noqa: E501 self._interval = interval @property def values_reference(self): """Gets the values_reference of this NetCDFTimeseriesRain. # noqa: E501 :return: The values_reference of this NetCDFTimeseriesRain. # noqa: E501 :rtype: str """ return self._values_reference @values_reference.setter def values_reference(self, values_reference): """Sets the values_reference of this NetCDFTimeseriesRain. :param values_reference: The values_reference of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ if (self.local_vars_configuration.client_side_validation and values_reference is not None and len(values_reference) > 255): raise ValueError("Invalid value for `values_reference`, length must be less than or equal to `255`") # noqa: E501 self._values_reference = values_reference @property def fill_value(self): """Gets the fill_value of this NetCDFTimeseriesRain. # noqa: E501 :return: The fill_value of this NetCDFTimeseriesRain. 
# noqa: E501 :rtype: str """ return self._fill_value @fill_value.setter def fill_value(self, fill_value): """Sets the fill_value of this NetCDFTimeseriesRain. :param fill_value: The fill_value of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ if (self.local_vars_configuration.client_side_validation and fill_value is not None and len(fill_value) > 128): raise ValueError("Invalid value for `fill_value`, length must be less than or equal to `128`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and fill_value is not None and len(fill_value) < 1): raise ValueError("Invalid value for `fill_value`, length must be greater than or equal to `1`") # noqa: E501 self._fill_value = fill_value @property def units(self): """Gets the units of this NetCDFTimeseriesRain. # noqa: E501 :return: The units of this NetCDFTimeseriesRain. # noqa: E501 :rtype: str """ return self._units @units.setter def units(self, units): """Sets the units of this NetCDFTimeseriesRain. :param units: The units of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and units is None: # noqa: E501 raise ValueError("Invalid value for `units`, must not be `None`") # noqa: E501 allowed_values = ["mm", "mm/h"] # noqa: E501 if self.local_vars_configuration.client_side_validation and units not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `units` ({0}), must be one of {1}" # noqa: E501 .format(units, allowed_values) ) self._units = units @property def file(self): """Gets the file of this NetCDFTimeseriesRain. # noqa: E501 :return: The file of this NetCDFTimeseriesRain. # noqa: E501 :rtype: FileReadOnly """ return self._file @file.setter def file(self, file): """Sets the file of this NetCDFTimeseriesRain. :param file: The file of this NetCDFTimeseriesRain. # noqa: E501 :type: FileReadOnly """ self._file = file @property def uid(self): """Gets the uid of this NetCDFTimeseriesRain. 
# noqa: E501 :return: The uid of this NetCDFTimeseriesRain. # noqa: E501 :rtype: str """ return self._uid @uid.setter def uid(self, uid): """Sets the uid of this NetCDFTimeseriesRain. :param uid: The uid of this NetCDFTimeseriesRain. # noqa: E501 :type: str """ self._uid = uid def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, NetCDFTimeseriesRain): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, NetCDFTimeseriesRain): return True return self.to_dict() != other.to_dict()
url
ClassifierApp.tsx
/*--------------------------------------------------------------------------------------------- * Copyright (c) Bentley Systems, Incorporated. All rights reserved. * See LICENSE.md in the project root for license terms and full copyright notice. *--------------------------------------------------------------------------------------------*/ import "@bentley/icons-generic-webfont/dist/bentley-icons-generic-webfont.css"; import "common/samples-common.scss"; import { ContextRealityModelProps, ModelProps, ModelQueryParams, SpatialClassificationProps } from "@bentley/imodeljs-common"; import { ContextRealityModelState, findAvailableUnattachedRealityModels, IModelConnection, ScreenViewport, SpatialModelState, SpatialViewState, Viewport } from "@bentley/imodeljs-frontend"; import { Presentation, SelectionChangesListener } from "@bentley/presentation-frontend"; export default class
{ private static _selectionListener: SelectionChangesListener; public static removeSelectionListener() { Presentation.selection.selectionChange.removeListener(this._selectionListener); } public static addSelectionListener(listener: SelectionChangesListener) { this._selectionListener = listener; Presentation.selection.selectionChange.addListener(this._selectionListener); } public static async turnOnAvailableRealityModel(viewPort: ScreenViewport, imodel: IModelConnection) { const style = viewPort.displayStyle.clone(); // Get first available reality models and attach them to displayStyle const availableModels: ContextRealityModelProps[] = await findAvailableUnattachedRealityModels(imodel.contextId!, imodel); for (const crmProp of availableModels) { style.attachRealityModel(crmProp); viewPort.displayStyle = style; break; } } /** * Query the iModel to get available spatial classifiers. * Also do a custom sort and filtering for the purposes of this sample. */ public static async getAvailableClassifierListForViewport(vp?: Viewport): Promise<{ [key: string]: string }> { const models: { [key: string]: string } = {}; if (!vp || !(vp.view instanceof SpatialViewState)) return Promise.resolve(models); const modelQueryParams: ModelQueryParams = { from: SpatialModelState.classFullName, wantPrivate: false, }; let curModelProps: ModelProps[] = new Array<ModelProps>(); curModelProps = await vp.iModel.models.queryProps(modelQueryParams); // Custom sort to put 'Commercial' first. It makes the best default example. 
curModelProps = curModelProps.sort((a, b) => { if (b.name?.includes("Commercial")) return 1; if (a.name?.includes("Commercial")) return -1; return a.name!.localeCompare(b.name!); }); // Filter out models that are not classifiers and form {[key: string]: string } object for (const modelProps of curModelProps) { if (modelProps.id && modelProps.name !== "PhiladelphiaClassification" && modelProps.name !== "Philadelphia_Pictometry") { const modelId = modelProps.id; const name = modelProps.name ? modelProps.name : modelId; models[modelId] = name.substring(0, name.indexOf(",")); } } return Promise.resolve(models); } // Update the classifier in the ViewPort public static updateRealityDataClassifiers(vp: ScreenViewport, classifier: SpatialClassificationProps.Classifier) { // Get the first reality model in the view const existingRealityModels: ContextRealityModelState[] = []; vp.displayStyle.forEachRealityModel( (modelState: ContextRealityModelState) => existingRealityModels.push(modelState), ); const realityModel = existingRealityModels[0]; // Loop through all classifiers in the reality model. // If the classifier exists, update it with classifier properties // If the classifier is not found, add it to realityModel.classifiers let existingClassifier: boolean = false; if (realityModel && realityModel.classifiers) { Array.from(realityModel.classifiers).forEach((storedClassifier) => { if (classifier.name === storedClassifier.name) { existingClassifier = true; storedClassifier.name = classifier.name; storedClassifier.expand = classifier.expand; storedClassifier.flags = classifier.flags; storedClassifier.modelId = classifier.modelId; } }); if (!existingClassifier) realityModel.classifiers.push(classifier); realityModel.classifiers.active = classifier; vp.invalidateScene(); return; } } }
ClassifierApp
crt1.rs
use cortexm3::{generic_isr, nvic, systick_handler, SVC_Handler}; use setup; extern "C" { // Symbols defined in the linker file static mut _erelocate: u32; static mut _etext: u32; static mut _ezero: u32; static mut _srelocate: u32; static mut _szero: u32; fn reset_handler(); // _estack is not really a function, but it makes the types work // You should never actually invoke it!! fn _estack(); } unsafe extern "C" fn unhandled_interrupt()
unsafe extern "C" fn hard_fault_handler() { 'loop0: loop {} } #[link_section=".vectors"] #[cfg_attr(rustfmt, rustfmt_skip)] // no_mangle Ensures that the symbol is kept until the final binary #[no_mangle] pub static BASE_VECTORS: [unsafe extern fn(); 50] = [ _estack, reset_handler, unhandled_interrupt, // NMI hard_fault_handler, // Hard Fault unhandled_interrupt, // MPU fault unhandled_interrupt, // Bus fault unhandled_interrupt, // Usage fault unhandled_interrupt, // Reserved unhandled_interrupt, // Reserved unhandled_interrupt, // Reserved unhandled_interrupt, // Reserved SVC_Handler, // SVC unhandled_interrupt, // Debug monitor, unhandled_interrupt, // Reserved unhandled_interrupt, // PendSV systick_handler, // Systick generic_isr, // GPIO Int handler generic_isr, // I2C generic_isr, // RF Core Command & Packet Engine 1 generic_isr, // AON SpiSplave Rx, Tx and CS generic_isr, // AON RTC generic_isr, // UART0 Rx and Tx generic_isr, // AUX software event 0 generic_isr, // SSI0 Rx and Tx generic_isr, // SSI1 Rx and Tx generic_isr, // RF Core Command & Packet Engine 0 generic_isr, // RF Core Hardware generic_isr, // RF Core Command Acknowledge generic_isr, // I2S generic_isr, // AUX software event 1 generic_isr, // Watchdog timer generic_isr, // Timer 0 subtimer A generic_isr, // Timer 0 subtimer B generic_isr, // Timer 1 subtimer A generic_isr, // Timer 1 subtimer B generic_isr, // Timer 2 subtimer A generic_isr, // Timer 2 subtimer B generic_isr, // Timer 3 subtimer A generic_isr, // Timer 3 subtimer B generic_isr, // Crypto Core Result available generic_isr, // uDMA Software generic_isr, // uDMA Error generic_isr, // Flash controller generic_isr, // Software Event 0 generic_isr, // AUX combined event generic_isr, // AON programmable 0 generic_isr, // Dynamic Programmable interrupt // source (Default: PRCM) generic_isr, // AUX Comparator A generic_isr, // AUX ADC new sample or ADC DMA // done, ADC underflow, ADC overflow generic_isr // TRNG event ]; #[no_mangle] 
pub unsafe extern "C" fn init() { let mut current_block; let mut p_src: *mut u32; let mut p_dest: *mut u32; // Move the relocate segment. This assumes it is located after the text // segment, which is where the storm linker file puts it p_src = &mut _etext as (*mut u32); p_dest = &mut _srelocate as (*mut u32); if p_src != p_dest { current_block = 1; } else { current_block = 2; } 'loop1: loop { if current_block == 1 { if !(p_dest < &mut _erelocate as (*mut u32)) { current_block = 2; continue; } *{ let _old = p_dest; p_dest = p_dest.offset(1isize); _old } = *{ let _old = p_src; p_src = p_src.offset(1isize); _old }; current_block = 1; } else { p_dest = &mut _szero as (*mut u32); break; } } 'loop3: loop { if !(p_dest < &mut _ezero as (*mut u32)) { break; } *{ let _old = p_dest; p_dest = p_dest.offset(1isize); _old } = 0u32; } setup::perform(); nvic::enable_all(); }
{ 'loop0: loop {} }
export.py
# # This file is part of LiteX. # # This file is Copyright (c) 2013-2014 Sebastien Bourdeauducq <[email protected]> # This file is Copyright (c) 2014-2019 Florent Kermarrec <[email protected]> # This file is Copyright (c) 2018 Dolu1990 <[email protected]> # This file is Copyright (c) 2019 Gabriel L. Somlo <[email protected]> # This file is Copyright (c) 2018 Jean-François Nguyen <[email protected]> # This file is Copyright (c) 2019 Antmicro <www.antmicro.com> # This file is Copyright (c) 2013 Robert Jordens <[email protected]> # This file is Copyright (c) 2018 Sean Cross <[email protected]> # This file is Copyright (c) 2018 Sergiusz Bazanski <[email protected]> # This file is Copyright (c) 2018-2016 Tim 'mithro' Ansell <[email protected]> # This file is Copyright (c) 2015 whitequark <[email protected]> # This file is Copyright (c) 2018 William D. Jones <[email protected]> # This file is Copyright (c) 2020 Piotr Esden-Tempski <[email protected]> # SPDX-License-Identifier: BSD-2-Clause import os import json import time import datetime import inspect from shutil import which from sysconfig import get_platform from migen import * from litex.soc.interconnect.csr import CSRStatus from litex.build.tools import generated_banner from litex.soc.doc.rst import reflow from litex.soc.doc.module import gather_submodules, ModuleNotDocumented, DocumentedModule, DocumentedInterrupts from litex.soc.doc.csr import DocumentedCSRRegion from litex.soc.interconnect.csr import _CompoundCSR # CPU files ---------------------------------------------------------------------------------------- def get_cpu_mak(cpu, compile_software): # Select between CLANG and GCC. clang = os.getenv("CLANG", "") if clang != "": clang = bool(int(clang)) else: clang = None if cpu.clang_triple is None: if clang: raise ValueError(cpu.name + " is not supported with CLANG.") else: clang = False else: # Default to gcc unless told otherwise. 
if clang is None: clang = False assert isinstance(clang, bool) if clang: triple = cpu.clang_triple flags = cpu.clang_flags else: triple = cpu.gcc_triple flags = cpu.gcc_flags # Select triple when more than one. def select_triple(triple): r = None if not isinstance(triple, tuple): triple = (triple,) override = os.getenv("LITEX_ENV_CC_TRIPLE") if override: triple = (override,) + triple p = get_platform() for i in range(len(triple)): t = triple[i] # Use native toolchain if host and target platforms are the same. if t == 'riscv64-unknown-elf' and p == 'linux-riscv64': r = '--native--' break if which(t+"-gcc"): r = t break if r is None: if not compile_software: return "--not-found--" msg = "Unable to find any of the cross compilation toolchains:\n" for i in range(len(triple)): msg += "- " + triple[i] + "\n" raise OSError(msg) return r # Return informations. return [ ("TRIPLE", select_triple(triple)), ("CPU", cpu.name), ("CPUFLAGS", flags), ("CPUENDIANNESS", cpu.endianness), ("CLANG", str(int(clang))), ("CPU_DIRECTORY", os.path.dirname(inspect.getfile(cpu.__class__))), ] def get_linker_output_format(cpu): return f"OUTPUT_FORMAT(\"{cpu.linker_output_format}\")\n" def get_linker_regions(regions): r = "MEMORY {\n" for name, region in regions.items(): r += f"\t{name} : ORIGIN = 0x{region.origin:08x}, LENGTH = 0x{region.length:08x}\n" r += "}\n" return r # C Export ----------------------------------------------------------------------------------------- def get_git_header(): from litex.build.tools import get_migen_git_revision, get_litex_git_revision r = generated_banner("//") r += "#ifndef __GENERATED_GIT_H\n#define __GENERATED_GIT_H\n\n" r += f"#define MIGEN_GIT_SHA1 \"{get_migen_git_revision()}\"\n" r += f"#define LITEX_GIT_SHA1 \"{get_litex_git_revision()}\"\n" r += "#endif\n" return r def get_mem_header(regions): r = generated_banner("//") r += "#ifndef __GENERATED_MEM_H\n#define __GENERATED_MEM_H\n\n" for name, region in regions.items(): r += f"#ifndef 
{name.upper()}_BASE\n" r += f"#define {name.upper()}_BASE 0x{region.origin:08x}L\n" r += f"#define {name.upper()}_SIZE 0x{region.length:08x}\n" r += "#endif\n\n" r += "#ifndef MEM_REGIONS\n" r += "#define MEM_REGIONS \""; for name, region in regions.items(): r += f"{name.upper()} {' '*(8-len(name))} 0x{region.origin:08x} 0x{region.size:x} \\n" r = r[:-2] r += "\"\n" r += "#endif\n" r += "#endif\n" return r def get_soc_header(constants, with_access_functions=True): r = generated_banner("//") r += "#ifndef __GENERATED_SOC_H\n#define __GENERATED_SOC_H\n" funcs = "" for name, value in constants.items(): if value is None: r += "#define "+name+"\n" continue if isinstance(value, str): value = "\"" + value + "\"" ctype = "const char *" else: value = str(value) ctype = "int" r += "#define "+name+" "+value+"\n" if with_access_functions: funcs += "static inline "+ctype+" "+name.lower()+"_read(void) {\n" funcs += "\treturn "+value+";\n}\n" if with_access_functions: r += "\n#ifndef __ASSEMBLER__\n" r += funcs r += "#endif // !__ASSEMBLER__\n" r += "\n#endif\n" return r def _get_rw_functions_c(reg_name, reg_base, nwords, busword, alignment, read_only, with_access_functions): r = "" addr_str = f"CSR_{reg_name.upper()}_ADDR" size_str = f"CSR_{reg_name.upper()}_SIZE" r += f"#define {addr_str} (CSR_BASE + {hex(reg_base)}L)\n" r += f"#define {size_str} {nwords}\n" size = nwords*busword//8 if size > 8: # Downstream should select appropriate `csr_[rd|wr]_buf_uintX()` pair! 
return r elif size > 4: ctype = "uint64_t" elif size > 2: ctype = "uint32_t" elif size > 1: ctype = "uint16_t" else: ctype = "uint8_t" stride = alignment//8; if with_access_functions: r += f"static inline {ctype} {reg_name}_read(void) {{\n" if nwords > 1: r += f"\t{ctype} r = csr_read_simple(CSR_BASE + {reg_base}L);\n" for sub in range(1, nwords): r += f"\tr <<= {busword};\n" r += f"\tr |= csr_read_simple(CSR_BASE + {hex(reg_base+sub*stride)}L);\n" r += "\treturn r;\n}\n" else: r += f"\treturn csr_read_simple(CSR_BASE + {hex(reg_base)}L);\n}}\n" if not read_only: r += f"static inline void {reg_name}_write({ctype} v) {{\n" for sub in range(nwords): shift = (nwords-sub-1)*busword if shift: v_shift = "v >> {}".format(shift) else: v_shift = "v" r += f"\tcsr_write_simple({v_shift}, CSR_BASE + {hex(reg_base+sub*stride)}L);\n" r += "}\n" return r def get_csr_header(regions, constants, csr_base=None, with_access_functions=True): alignment = constants.get("CONFIG_CSR_ALIGNMENT", 32) r = generated_banner("//") if with_access_functions: # FIXME r += "#include <generated/soc.h>\n" r += "#ifndef __GENERATED_CSR_H\n#define __GENERATED_CSR_H\n" if with_access_functions: r += "#include <stdint.h>\n" r += "#include <system.h>\n" r += "#ifndef CSR_ACCESSORS_DEFINED\n" r += "#include <hw/common.h>\n" r += "#endif /* ! 
CSR_ACCESSORS_DEFINED */\n" csr_base = csr_base if csr_base is not None else regions[next(iter(regions))].origin r += "#ifndef CSR_BASE\n" r += f"#define CSR_BASE {hex(csr_base)}L\n" r += "#endif\n" for name, region in regions.items(): origin = region.origin - csr_base r += "\n/* "+name+" */\n" r += f"#define CSR_{name.upper()}_BASE (CSR_BASE + {hex(origin)}L)\n" if not isinstance(region.obj, Memory): for csr in region.obj: nr = (csr.size + region.busword - 1)//region.busword r += _get_rw_functions_c(name + "_" + csr.name, origin, nr, region.busword, alignment, getattr(csr, "read_only", False), with_access_functions) origin += alignment//8*nr if hasattr(csr, "fields"): for field in csr.fields.fields: offset = str(field.offset) size = str(field.size) r += f"#define CSR_{name.upper()}_{csr.name.upper()}_{field.name.upper()}_OFFSET {offset}\n" r += f"#define CSR_{name.upper()}_{csr.name.upper()}_{field.name.upper()}_SIZE {size}\n" if with_access_functions and csr.size <= 32: # FIXME: Implement extract/read functions for csr.size > 32-bit. 
reg_name = name + "_" + csr.name.lower() field_name = reg_name + "_" + field.name.lower() r += "static inline uint32_t " + field_name + "_extract(uint32_t oldword) {\n" r += "\tuint32_t mask = ((1 << " + size + ")-1);\n" r += "\treturn ( (oldword >> " + offset + ") & mask );\n}\n" r += "static inline uint32_t " + field_name + "_read(void) {\n" r += "\tuint32_t word = " + reg_name + "_read();\n" r += "\treturn " + field_name + "_extract(word);\n" r += "}\n" if not getattr(csr, "read_only", False): r += "static inline uint32_t " + field_name + "_replace(uint32_t oldword, uint32_t plain_value) {\n" r += "\tuint32_t mask = ((1 << " + size + ")-1);\n" r += "\treturn (oldword & (~(mask << " + offset + "))) | (mask & plain_value)<< " + offset + " ;\n}\n" r += "static inline void " + field_name + "_write(uint32_t plain_value) {\n" r += "\tuint32_t oldword = " + reg_name + "_read();\n" r += "\tuint32_t newword = " + field_name + "_replace(oldword, plain_value);\n" r += "\t" + reg_name + "_write(newword);\n" r += "}\n" r += "\n#endif\n" return r # JSON Export -------------------------------------------------------------------------------------- def get_csr_json(csr_regions={}, constants={}, mem_regions={}): alignment = constants.get("CONFIG_CSR_ALIGNMENT", 32) d = { "csr_bases": {}, "csr_registers": {}, "constants": {}, "memories": {}, } for name, region in csr_regions.items(): d["csr_bases"][name] = region.origin region_origin = region.origin if not isinstance(region.obj, Memory): for csr in region.obj: _size = (csr.size + region.busword - 1)//region.busword _type = "rw" if isinstance(csr, CSRStatus) and not hasattr(csr, "r"): _type = "ro" d["csr_registers"][name + "_" + csr.name] = { "addr": region_origin, "size": _size, "type": _type } region_origin += alignment//8*_size for name, value in constants.items(): d["constants"][name.lower()] = value.lower() if isinstance(value, str) else value for name, region in mem_regions.items(): d["memories"][name.lower()] = { "base": 
region.origin, "size": region.length, "type": region.type, } return json.dumps(d, indent=4) # CSV Export -------------------------------------------------------------------------------------- def get_csr_csv(csr_regions={}, constants={}, mem_regions={}): d = json.loads(get_csr_json(csr_regions, constants, mem_regions)) r = generated_banner("#") for name, value in d["csr_bases"].items(): r += "csr_base,{},0x{:08x},,\n".format(name, value) for name in d["csr_registers"].keys(): r += "csr_register,{},0x{:08x},{},{}\n".format(name, d["csr_registers"][name]["addr"], d["csr_registers"][name]["size"], d["csr_registers"][name]["type"]) for name, value in d["constants"].items(): r += "constant,{},{},,\n".format(name, value) for name in d["memories"].keys(): r += "memory_region,{},0x{:08x},{:d},{:s}\n".format(name, d["memories"][name]["base"], d["memories"][name]["size"], d["memories"][name]["type"], ) return r # SVD Export -------------------------------------------------------------------------------------- def get_csr_svd(soc, vendor="litex", name="soc", description=None): def sub_csr_bit_range(busword, csr, offset): nwords = (csr.size + busword - 1)//busword i = nwords - offset - 1 nbits = min(csr.size - i*busword, busword) - 1 name = (csr.name + str(i) if nwords > 1 else csr.name).upper() origin = i*busword return (origin, nbits, name) def print_svd_register(csr, csr_address, description, length, svd): svd.append(' <register>') svd.append(' <name>{}</name>'.format(csr.short_numbered_name)) if description is not None: svd.append(' <description><![CDATA[{}]]></description>'.format(description)) svd.append(' <addressOffset>0x{:04x}</addressOffset>'.format(csr_address)) svd.append(' <resetValue>0x{:02x}</resetValue>'.format(csr.reset_value)) svd.append(' <size>{}</size>'.format(length)) # svd.append(' <access>{}</access>'.format(csr.access)) # 'access' is a lie: "read-only" registers can legitimately change state based on a write, and is in fact used to handle the "pending" 
field in events csr_address = csr_address + 4 svd.append(' <fields>') if hasattr(csr, "fields") and len(csr.fields) > 0: for field in csr.fields: svd.append(' <field>') svd.append(' <name>{}</name>'.format(field.name)) svd.append(' <msb>{}</msb>'.format(field.offset + field.size - 1)) svd.append(' <bitRange>[{}:{}]</bitRange>'.format( field.offset + field.size - 1, field.offset)) svd.append(' <lsb>{}</lsb>'.format(field.offset)) svd.append(' <description><![CDATA[{}]]></description>'.format( reflow(field.description))) svd.append(' </field>') else: field_size = csr.size field_name = csr.short_name.lower() # Strip off "ev_" from eventmanager fields if field_name == "ev_enable": field_name = "enable" elif field_name == "ev_pending": field_name = "pending"
svd.append(' <name>{}</name>'.format(field_name)) svd.append(' <msb>{}</msb>'.format(field_size - 1)) svd.append(' <bitRange>[{}:{}]</bitRange>'.format(field_size - 1, 0)) svd.append(' <lsb>{}</lsb>'.format(0)) svd.append(' </field>') svd.append(' </fields>') svd.append(' </register>') interrupts = {} for csr, irq in sorted(soc.irq.locs.items()): interrupts[csr] = irq documented_regions = [] for region_name, region in soc.csr.regions.items(): documented_regions.append(DocumentedCSRRegion( name = region_name, region = region, csr_data_width = soc.csr.data_width) ) svd = [] svd.append('<?xml version="1.0" encoding="utf-8"?>') svd.append('') svd.append('<device schemaVersion="1.1" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" xs:noNamespaceSchemaLocation="CMSIS-SVD.xsd" >') svd.append(' <vendor>{}</vendor>'.format(vendor)) svd.append(' <name>{}</name>'.format(name.upper())) if description is not None: svd.append(' <description><![CDATA[{}]]></description>'.format(reflow(description))) else: fmt = "%Y-%m-%d %H:%M:%S" build_time = datetime.datetime.fromtimestamp(time.time()).strftime(fmt) svd.append(' <description><![CDATA[{}]]></description>'.format(reflow("Litex SoC " + build_time))) svd.append('') svd.append(' <addressUnitBits>8</addressUnitBits>') svd.append(' <width>32</width>') svd.append(' <size>32</size>') svd.append(' <access>read-write</access>') svd.append(' <resetValue>0x00000000</resetValue>') svd.append(' <resetMask>0xFFFFFFFF</resetMask>') svd.append('') svd.append(' <peripherals>') for region in documented_regions: csr_address = 0 svd.append(' <peripheral>') svd.append(' <name>{}</name>'.format(region.name.upper())) svd.append(' <baseAddress>0x{:08X}</baseAddress>'.format(region.origin)) svd.append(' <groupName>{}</groupName>'.format(region.name.upper())) if len(region.sections) > 0: svd.append(' <description><![CDATA[{}]]></description>'.format( reflow(region.sections[0].body()))) svd.append(' <registers>') for csr in region.csrs: description = 
None if hasattr(csr, "description"): description = csr.description if isinstance(csr, _CompoundCSR) and len(csr.simple_csrs) > 1: is_first = True for i in range(len(csr.simple_csrs)): (start, length, name) = sub_csr_bit_range( region.busword, csr, i) if length > 0: bits_str = "Bits {}-{} of `{}`.".format( start, start+length, csr.name) else: bits_str = "Bit {} of `{}`.".format( start, csr.name) if is_first: if description is not None: print_svd_register( csr.simple_csrs[i], csr_address, bits_str + " " + description, length, svd) else: print_svd_register( csr.simple_csrs[i], csr_address, bits_str, length, svd) is_first = False else: print_svd_register( csr.simple_csrs[i], csr_address, bits_str, length, svd) csr_address = csr_address + 4 else: length = ((csr.size + region.busword - 1) // region.busword) * region.busword print_svd_register( csr, csr_address, description, length, svd) csr_address = csr_address + 4 svd.append(' </registers>') svd.append(' <addressBlock>') svd.append(' <offset>0</offset>') svd.append(' <size>0x{:x}</size>'.format(csr_address)) svd.append(' <usage>registers</usage>') svd.append(' </addressBlock>') if region.name in interrupts: svd.append(' <interrupt>') svd.append(' <name>{}</name>'.format(region.name)) svd.append(' <value>{}</value>'.format(interrupts[region.name])) svd.append(' </interrupt>') svd.append(' </peripheral>') svd.append(' </peripherals>') svd.append(' <vendorExtensions>') if len(soc.mem_regions) > 0: svd.append(' <memoryRegions>') for region_name, region in soc.mem_regions.items(): svd.append(' <memoryRegion>') svd.append(' <name>{}</name>'.format(region_name.upper())) svd.append(' <baseAddress>0x{:08X}</baseAddress>'.format(region.origin)) svd.append(' <size>0x{:08X}</size>'.format(region.size)) svd.append(' </memoryRegion>') svd.append(' </memoryRegions>') svd.append(' <constants>') for name, value in soc.constants.items(): svd.append(' <constant name="{}" value="{}" />'.format(name, value)) svd.append(' </constants>') 
svd.append(' </vendorExtensions>') svd.append('</device>') return "\n".join(svd) # Memory.x Export ---------------------------------------------------------------------------------- def get_memory_x(soc): r = get_linker_regions(soc.mem_regions) r += '\n' r += 'REGION_ALIAS("REGION_TEXT", spiflash);\n' r += 'REGION_ALIAS("REGION_RODATA", spiflash);\n' r += 'REGION_ALIAS("REGION_DATA", sram);\n' r += 'REGION_ALIAS("REGION_BSS", sram);\n' r += 'REGION_ALIAS("REGION_HEAP", sram);\n' r += 'REGION_ALIAS("REGION_STACK", sram);\n\n' r += '/* CPU reset location. */\n' r += '_stext = {:#08x};\n'.format(soc.cpu.reset_address) return r
elif field_name == "ev_status": field_name = "status" svd.append(' <field>')
server.magic_generics.ts
#!/usr/bin/env node import * as debug from "debug"; import * as grpc from "@grpc/grpc-js"; import {BookServiceService, IBookServiceServer} from "./proto/book_grpc_pb"; import { Book, GetBookRequest, GetBookViaAuthor, BookList, GetBookListRequest } from "./proto/book_pb"; const log = debug("SampleServer"); type KnownKeys<T> = { [K in keyof T]: string extends K ? never : number extends K ? never : K } extends { [_ in keyof T]: infer U } ? U : never; type KnownOnly<T extends Record<any, any>> = Pick<T, KnownKeys<T>>; type ITypedBookServer = KnownOnly<IBookServiceServer>; class TypedServerOverride extends grpc.Server { public addTypedService<TypedServiceImplementation extends Record<any, any>>( service: grpc.ServiceDefinition, implementation: TypedServiceImplementation, ): void { this.addService(service, implementation); } } // tslint:disable-next-line:max-classes-per-file class ServerImpl implements ITypedBookServer { public attr: string; constructor(attr: string) { this.attr = attr; } public getBook(call: grpc.ServerUnaryCall<GetBookRequest, Book>, callback: grpc.sendUnaryData<Book>): void { const book = new Book(); book.setTitle("DefaultBook"); book.setAuthor("DefaultAuthor"); log(`[getBook] Done: ${JSON.stringify(book.toObject())}`); callback(null, book); } public getBooks(call: grpc.ServerDuplexStream<GetBookRequest, Book>): void { call.on("data", (request: GetBookRequest) => { const reply = new Book(); reply.setTitle(`Book${request.getIsbn()}`); reply.setAuthor(`Author${request.getIsbn()}`); reply.setIsbn(request.getIsbn()); log(`[getBooks] Write: ${JSON.stringify(reply.toObject())}`); call.write(reply); }); call.on("end", () => { log("[getBooks] Done."); call.end(); }); } public getBooksViaAuthor(call: grpc.ServerWritableStream<GetBookViaAuthor, Book>): void { log(`[getBooksViaAuthor] Request: ${JSON.stringify(call.request.toObject())}`); for (let i = 1; i <= 10; i++) { const reply = new Book(); reply.setTitle(`Book${i}`); 
reply.setAuthor(call.request.getAuthor()); reply.setIsbn(i); log(`[getBooksViaAuthor] Write: ${JSON.stringify(reply.toObject())}`); call.write(reply); }
public getGreatestBook(call: grpc.ServerReadableStream<GetBookRequest, Book>, callback: grpc.sendUnaryData<Book>): void { let lastOne: GetBookRequest; call.on("data", (request: GetBookRequest) => { log(`[getGreatestBook] Request: ${JSON.stringify(request.toObject())}`); lastOne = request; }); call.on("end", () => { const reply = new Book(); reply.setIsbn(lastOne.getIsbn()); reply.setTitle("LastOne"); reply.setAuthor("LastOne"); log(`[getGreatestBook] Done: ${JSON.stringify(reply.toObject())}`); callback(null, reply); }); } public getBookList(call: grpc.ServerUnaryCall<GetBookListRequest, BookList>, callback: grpc.sendUnaryData<BookList>) { const author = call.request.getAuthor(); const books = new BookList(); const book1 = new Book(); book1.setTitle("DefaultBook1").setAuthor(author); const book2 = new Book(); book2.setTitle("DefaultBook2").setAuthor(author); books.addBooks(book1); books.addBooks(book2); log(`[getBookList] Done: 1: ${JSON.stringify(book1.toObject())}, 2: ${JSON.stringify(book2.toObject())}`); callback(null, books); } } function startServer() { const server = new TypedServerOverride(); server.addTypedService<ITypedBookServer>(BookServiceService, new ServerImpl("hello world")); server.bindAsync("127.0.0.1:50051", grpc.ServerCredentials.createInsecure(), (err, port) => { if (err) { throw err; } log(`Server started, listening: 127.0.0.1:${port}`); server.start(); }); } startServer(); process.on("uncaughtException", (err) => { log(`process on uncaughtException error: ${err}`); }); process.on("unhandledRejection", (err) => { log(`process on unhandledRejection error: ${err}`); });
log("[getBooksViaAuthor] Done."); call.end(); }
starvation.rs
#![feature(test)] mod starvation { use crossbeam_utils::thread; use swym::{tcell::TCell, thread_key}; #[test] fn large_tx()
}
{ const CONTENDED_IDX: usize = 0; const TX_SIZE: usize = 50_000; let data = unsafe { &mut *Box::into_raw(Box::new(Vec::new())) }; for _ in 0..TX_SIZE { data.push(TCell::new(0)); } let data = &*data; // abort if test lasts too long (failure) std::thread::spawn(move || { std::thread::sleep(std::time::Duration::from_secs(10)); std::process::abort(); }); // thread that starves other threads std::thread::spawn(move || loop { thread_key::get().rw(|tx| { data[CONTENDED_IDX].set(tx, 0)?; Ok(()) }) }); // rw thread::scope(|s| { // starving thread s.spawn(|_| { thread_key::get().rw(|tx| { for i in 0..TX_SIZE { data[i].set(tx, 0)?; } Ok(()) }) }); }) .unwrap(); swym::stats::print_stats(); // read thread::scope(|s| { // starving thread s.spawn(|_| { thread_key::get().read(|tx| { // read the contended variable last for i in (0..TX_SIZE).rev() { drop(data[i].get(tx, Default::default())?); } Ok(()) }) }); }) .unwrap(); swym::stats::print_stats(); // check for gc deadlock let string = TCell::new("blah blah".to_owned()); let other = TCell::new(0); thread::scope(|s| { s.spawn(|_| { thread_key::get().rw(|tx| { // block gc std::thread::sleep(std::time::Duration::from_millis(1_000)); // causes a park before commit other.set(tx, 0)?; Ok(()) }) }); // starving thread s.spawn(|_| { // run enough times that gc happens for _ in 0..128 { thread_key::get().rw(|tx| { // doom the transaction drop(data[CONTENDED_IDX].get(tx, Default::default())?); std::thread::sleep(std::time::Duration::from_millis(1)); // create work for the gc string.set(tx, "blah".to_owned())?; Ok(()) }) } }); }) .unwrap(); swym::stats::print_stats(); }
iterators3.rs
// iterators3.rs // This is a bigger exercise than most of the others! You can do it! // Here is your mission, should you choose to accept it: // 1. Complete the divide function to get the first four tests to pass // 2. Uncomment the last two tests and get them to pass by filling in // values for `x` using `division_results`. // Execute `rustlings hint iterators3` to get some hints! // Have fun :-) #[derive(Debug, PartialEq, Eq)] pub enum DivisionError { NotDivisible(NotDivisibleError), DivideByZero, } #[derive(Debug, PartialEq, Eq)] pub struct NotDivisibleError { dividend: i32, divisor: i32, } // This function should calculate `a` divided by `b` if `a` is // evenly divisible by b. // Otherwise, it should return a suitable error. pub fn divide(a: i32, b: i32) -> Result<i32, DivisionError> { if b == 0
if a % b != 0 { return Err(DivisionError::NotDivisible(NotDivisibleError { dividend: a, divisor: b, })); } Ok(a / b) } #[cfg(test)] mod tests { use super::*; // Tests that verify your `divide` function implementation #[test] fn test_success() { assert_eq!(divide(81, 9), Ok(9)); } #[test] fn test_not_divisible() { assert_eq!( divide(81, 6), Err(DivisionError::NotDivisible(NotDivisibleError { dividend: 81, divisor: 6 })) ); } #[test] fn test_divide_by_0() { assert_eq!(divide(81, 0), Err(DivisionError::DivideByZero)); } #[test] fn test_divide_0_by_something() { assert_eq!(divide(0, 81), Ok(0)); } // Iterator exercises using your `divide` function #[test] fn result_with_list() { let numbers = vec![27, 297, 38502, 81]; let division_results = numbers.into_iter().map(|n| divide(n, 27)); let x: Result<Vec<i32>, DivisionError> = division_results.collect(); assert_eq!(format!("{:?}", x), "Ok([1, 11, 1426, 3])"); } #[test] fn list_of_results() { let numbers = vec![27, 297, 38502, 81]; let division_results = numbers.into_iter().map(|n| divide(n, 27)); let x: Vec<Result<i32, DivisionError>> = division_results.collect(); assert_eq!(format!("{:?}", x), "[Ok(1), Ok(11), Ok(1426), Ok(3)]"); } }
{ return Err(DivisionError::DivideByZero); }
file.rs
use super::{DirsAndFileName, ObjectStorePath, PathPart}; use std::{mem, path::PathBuf}; /// An object storage location suitable for passing to disk based object /// storage. #[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct FilePath { inner: FilePathRepresentation, } impl ObjectStorePath for FilePath { fn set_file_name(&mut self, part: impl Into<String>) { self.inner = mem::take(&mut self.inner).set_file_name(part); } fn push_dir(&mut self, part: impl Into<String>) { self.inner = mem::take(&mut self.inner).push_dir(part); } fn push_all_dirs<'a>(&mut self, parts: impl AsRef<[&'a str]>) { self.inner = mem::take(&mut self.inner).push_all_dirs(parts); } fn display(&self) -> String { self.to_raw().display().to_string() } } impl Ord for FilePath { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.inner.cmp(&other.inner) } } impl PartialOrd for FilePath { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl FilePath { /// Creates a file storage location from a `PathBuf` without parsing or /// allocating unless other methods are called on this instance that /// need it pub fn raw(path: impl Into<PathBuf>) -> Self { let path = path.into(); Self { inner: FilePathRepresentation::Raw(path), } } /// Creates a filesystem `PathBuf` location by using the standard library's /// `PathBuf` building implementation appropriate for the current /// platform. pub fn to_raw(&self) -> PathBuf { use FilePathRepresentation::*; match &self.inner { Raw(path) => path.to_owned(), Parsed(dirs_and_file_name) => { let mut path: PathBuf = dirs_and_file_name .directories .iter() .map(PathPart::encoded) .collect(); if let Some(file_name) = &dirs_and_file_name.file_name { path.push(file_name.encoded()); } path } } } /// Add the parts of `path` to the end of this path. Notably does /// *not* behave as `PathBuf::push` does: there is no way to replace the /// root. 
If `self` has a file name, that will be removed, then the /// directories of `path` will be appended, then any file name of `path` /// will be assigned to `self`. pub fn push_path(&mut self, path: &Self) { self.inner = mem::take(&mut self.inner).push_path(path) } /// Add a `PathPart` to the end of the path's directories. pub fn push_part_as_dir(&mut self, part: &PathPart) { self.inner = mem::take(&mut self.inner).push_part_as_dir(part); } /// Whether the prefix is the start of this path or not. pub fn prefix_matches(&self, prefix: &Self) -> bool { self.inner.prefix_matches(&prefix.inner) } /// Returns all directory and file name `PathParts` in `self` after the /// specified `prefix`. Ignores any `file_name` part of `prefix`. /// Returns `None` if `self` dosen't start with `prefix`. pub fn parts_after_prefix(&self, prefix: &Self) -> Option<Vec<PathPart>> { self.inner.parts_after_prefix(&prefix.inner) } /// Remove this path's file name, if there is one. pub fn unset_file_name(&mut self) { self.inner = mem::take(&mut self.inner).unset_file_name(); } } impl From<FilePath> for DirsAndFileName { fn from(file_path: FilePath) -> Self { file_path.inner.into() } } impl From<DirsAndFileName> for FilePath { fn from(dirs_and_file_name: DirsAndFileName) -> Self { Self { inner: FilePathRepresentation::Parsed(dirs_and_file_name), } } } #[derive(Debug, Clone, Eq)] enum FilePathRepresentation { Raw(PathBuf), Parsed(DirsAndFileName), } impl Default for FilePathRepresentation { fn default() -> Self { Self::Parsed(DirsAndFileName::default()) } } impl PartialEq for FilePathRepresentation { fn eq(&self, other: &Self) -> bool { use FilePathRepresentation::*; match (self, other) { (Parsed(self_parts), Parsed(other_parts)) => self_parts == other_parts, (Parsed(self_parts), _) => { let other_parts: DirsAndFileName = other.to_owned().into(); *self_parts == other_parts } (_, Parsed(other_parts)) => { let self_parts: DirsAndFileName = self.to_owned().into(); self_parts == *other_parts } _ => { 
let self_parts: DirsAndFileName = self.to_owned().into(); let other_parts: DirsAndFileName = other.to_owned().into(); self_parts == other_parts } } } } impl PartialOrd for FilePathRepresentation { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Ord for FilePathRepresentation { fn cmp(&self, other: &Self) -> std::cmp::Ordering { use FilePathRepresentation::*; match (self, other) { (Parsed(self_parts), Parsed(other_parts)) => self_parts.cmp(other_parts), (Parsed(self_parts), _) => { let other_parts: DirsAndFileName = other.to_owned().into(); self_parts.cmp(&other_parts) } (_, Parsed(other_parts)) => { let self_parts: DirsAndFileName = self.to_owned().into(); self_parts.cmp(other_parts) } _ => { let self_parts: DirsAndFileName = self.to_owned().into(); let other_parts: DirsAndFileName = other.to_owned().into(); self_parts.cmp(&other_parts) } } } } impl FilePathRepresentation { fn push_dir(self, part: impl Into<String>) -> Self { let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.push_dir(part); Self::Parsed(dirs_and_file_name) } fn push_all_dirs<'a>(self, parts: impl AsRef<[&'a str]>) -> Self { let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.push_all_dirs(parts); Self::Parsed(dirs_and_file_name) } fn set_file_name(self, part: impl Into<String>) -> Self { let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.set_file_name(part); Self::Parsed(dirs_and_file_name) } fn unset_file_name(self) -> Self { let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.unset_file_name(); Self::Parsed(dirs_and_file_name) } /// Add the parts of `path` to the end of this path. Notably does /// *not* behave as `PathBuf::push` does: there is no way to replace the /// root. If `self` has a file name, that will be removed, then the /// directories of `path` will be appended, then any file name of `path` /// will be assigned to `self`. 
fn push_path(self, path: &FilePath) -> Self { let DirsAndFileName { directories: path_dirs, file_name: path_file_name, } = path.inner.to_owned().into(); let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.directories.extend(path_dirs); dirs_and_file_name.file_name = path_file_name; Self::Parsed(dirs_and_file_name) } /// Add a `PathPart` to the end of the path's directories. fn push_part_as_dir(self, part: &PathPart) -> Self { let mut dirs_and_file_name: DirsAndFileName = self.into(); dirs_and_file_name.push_part_as_dir(part); Self::Parsed(dirs_and_file_name) } fn prefix_matches(&self, prefix: &Self) -> bool { use FilePathRepresentation::*; match (self, prefix) { (Parsed(self_parts), Parsed(prefix_parts)) => self_parts.prefix_matches(prefix_parts), (Parsed(self_parts), _) => { let prefix_parts: DirsAndFileName = prefix.to_owned().into(); self_parts.prefix_matches(&prefix_parts) } (_, Parsed(prefix_parts)) => { let self_parts: DirsAndFileName = self.to_owned().into(); self_parts.prefix_matches(prefix_parts) } _ =>
} } /// Returns all directory and file name `PathParts` in `self` after the /// specified `prefix`. Ignores any `file_name` part of `prefix`. /// Returns `None` if `self` dosen't start with `prefix`. fn parts_after_prefix(&self, prefix: &Self) -> Option<Vec<PathPart>> { use FilePathRepresentation::*; match (self, prefix) { (Parsed(self_parts), Parsed(prefix_parts)) => { self_parts.parts_after_prefix(prefix_parts) } (Parsed(self_parts), _) => { let prefix_parts: DirsAndFileName = prefix.to_owned().into(); self_parts.parts_after_prefix(&prefix_parts) } (_, Parsed(prefix_parts)) => { let self_parts: DirsAndFileName = self.to_owned().into(); self_parts.parts_after_prefix(prefix_parts) } _ => { let self_parts: DirsAndFileName = self.to_owned().into(); let prefix_parts: DirsAndFileName = prefix.to_owned().into(); self_parts.parts_after_prefix(&prefix_parts) } } } } impl From<FilePathRepresentation> for DirsAndFileName { fn from(file_path_rep: FilePathRepresentation) -> Self { use FilePathRepresentation::*; match file_path_rep { Raw(path) => { let mut parts: Vec<PathPart> = path .iter() .flat_map(|s| s.to_os_string().into_string().map(PathPart)) .collect(); let maybe_file_name = match parts.pop() { Some(file) if !file.encoded().starts_with('.') && (file.encoded().ends_with(".json") || file.encoded().ends_with(".parquet") || file.encoded().ends_with(".segment")) => { Some(file) } Some(dir) => { parts.push(dir); None } None => None, }; Self { directories: parts, file_name: maybe_file_name, } } Parsed(dirs_and_file_name) => dirs_and_file_name, } } } #[cfg(test)] mod tests { use super::*; #[test] fn path_buf_to_dirs_and_file_name_conversion() { // Last section ending in `.json` is a file name let path_buf: PathBuf = "/one/two/blah.json".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 3); assert_eq!(parts.file_name.unwrap().0, "blah.json"); // Last section ending in `.segment` is a file name 
let path_buf: PathBuf = "/one/two/blah.segment".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 3); assert_eq!(parts.file_name.unwrap().0, "blah.segment"); // Last section ending in `.parquet` is a file name let path_buf: PathBuf = "/one/two/blah.parquet".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 3); assert_eq!(parts.file_name.unwrap().0, "blah.parquet"); // Last section ending in `.txt` is NOT a file name; we don't recognize that // extension let path_buf: PathBuf = "/one/two/blah.txt".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 4); assert!(parts.file_name.is_none()); // Last section containing a `.` isn't a file name let path_buf: PathBuf = "/one/two/blah.blah".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 4); assert!(parts.file_name.is_none()); // Last section starting with a `.` isn't a file name (macos temp dirs do this) let path_buf: PathBuf = "/one/two/.blah".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert_eq!(parts.directories.len(), 4); assert!(parts.file_name.is_none()); } #[test] fn conversions() { // dir and file name let path_buf: PathBuf = "foo/bar/blah.json".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); let mut expected_parts = DirsAndFileName::default(); expected_parts.push_dir("foo"); expected_parts.push_dir("bar"); expected_parts.file_name = Some("blah.json".into()); assert_eq!(parts, expected_parts); // dir, no file name let path_buf: PathBuf = "foo/bar".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); expected_parts.file_name = None; assert_eq!(parts, 
expected_parts); // no dir, file name let path_buf: PathBuf = "blah.json".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert!(parts.directories.is_empty()); assert_eq!(parts.file_name.unwrap().encoded(), "blah.json"); // empty let path_buf: PathBuf = "".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.into(); assert!(parts.directories.is_empty()); assert!(parts.file_name.is_none()); } #[test] fn equality() { let path_buf: PathBuf = "foo/bar/blah.json".into(); let file_path = FilePath::raw(path_buf); let parts: DirsAndFileName = file_path.clone().into(); let parsed: FilePath = parts.into(); assert_eq!(file_path, parsed); } #[test] fn ordering() { let a_path_buf: PathBuf = "foo/bar/a.json".into(); let a_file_path = FilePath::raw(&a_path_buf); let a_parts: DirsAndFileName = a_file_path.into(); let a_parsed: FilePath = a_parts.into(); let b_path_buf: PathBuf = "foo/bar/b.json".into(); let b_file_path = FilePath::raw(&b_path_buf); assert!(a_path_buf < b_path_buf); assert!( a_parsed < b_file_path, "a was not less than b: a = {:#?}\nb = {:#?}", a_parsed, b_file_path ); } #[test] fn path_display() { let a_path_buf: PathBuf = "foo/bar/a.json".into(); let expected_display = a_path_buf.display().to_string(); let a_file_path = FilePath::raw(&a_path_buf); assert_eq!(a_file_path.display(), expected_display); let a_parts: DirsAndFileName = a_file_path.into(); let a_parsed: FilePath = a_parts.into(); assert_eq!(a_parsed.display(), expected_display); } }
{ let self_parts: DirsAndFileName = self.to_owned().into(); let prefix_parts: DirsAndFileName = prefix.to_owned().into(); self_parts.prefix_matches(&prefix_parts) }
parse-int-x.min.js
!function(f){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=f();else if("function"==typeof define&&define.amd)define([],f);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).returnExports=f()}}(function(){return function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a="function"==typeof require&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n||e)},l,l.exports,e,t,n,r)}return n[o].exports}for(var i="function"==typeof require&&require,o=0;o<r.length;o++)s(r[o]);return s}({1:[function(_dereq_,module,exports){/** * @file Parses a string argument and returns an integer of the specified radix. * @version 2.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module parse-int-x */ "use strict";var nativeParseInt=parseInt,NAN=_dereq_("nan-x"),toStr=_dereq_("to-string-x"),trimLeft2016=_dereq_("trim-left-x").trimLeft2016,trimLeft2018=_dereq_("trim-left-x").trimLeft2018,chachedCtrs=_dereq_("cached-constructors-x"),castNumber=chachedCtrs.Number,charAt=chachedCtrs.String.prototype.charAt,hexRegex=/^[-+]?0[xX]/,test=hexRegex.test,$parseInt2018=function parseInt2018(string,radix){var str=trimLeft2018(toStr(string));return"\u180e"===charAt.call(str,0)?NAN:nativeParseInt(str,castNumber(radix)||(test.call(hexRegex,str)?16:10))};module.exports={parseInt:$parseInt2018,parseInt2016:function parseInt2016(string,radix){var str=trimLeft2016(toStr(string));return nativeParseInt(str,castNumber(radix)||(test.call(hexRegex,str)?16:10))},parseInt2018:$parseInt2018}},{"cached-constructors-x":2,"nan-x":6,"to-string-x":9,"trim-left-x":10}],2:[function(_dereq_,module,exports){/** * @file Constructors cached from literals. 
* @version 1.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module cached-constructors-x */ "use strict";module.exports={Array:[].constructor,Boolean:(!0).constructor,Number:(0).constructor,Object:{}.constructor,RegExp:/(?:)/.constructor,String:"".constructor}},{}],3:[function(_dereq_,module,exports){/** * @file Checks if `value` is `null` or `undefined`. * @version 1.4.1 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module is-nil-x */ "use strict";var isUndefined=_dereq_("validate.io-undefined"),isNull=_dereq_("lodash.isnull");module.exports=function isNil(value){return isNull(value)||isUndefined(value)}},{"lodash.isnull":5,"validate.io-undefined":11}],4:[function(_dereq_,module,exports){"use strict";var toStr=Object.prototype.toString;if("function"==typeof Symbol&&"symbol"==typeof Symbol()){var symToStr=Symbol.prototype.toString,symStringRegex=/^Symbol\(.*\)$/,isSymbolObject=function isSymbolObject(value){return"symbol"==typeof value.valueOf()&&symStringRegex.test(symToStr.call(value))};module.exports=function isSymbol(value){if("symbol"==typeof value)return!0;if("[object Symbol]"!==toStr.call(value))return!1;try{return isSymbolObject(value)}catch(e){return!1}}}else module.exports=function isSymbol(value){return!1}},{}],5:[function(_dereq_,module,exports){module.exports=function isNull(value){return null===value}},{}],6:[function(_dereq_,module,exports){/** * @file The constant NaN derived mathematically by 0 / 0. * @version 1.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module nan-x */ "use strict";module.exports=NaN},{}],7:[function(_dereq_,module,exports){/** * @file Requires an argument is corecible then converts using ToString. 
* @version 1.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module require-coercible-to-string-x */ "use strict";var requireObjectCoercible=_dereq_("require-object-coercible-x"),toStr=_dereq_("to-string-x");module.exports=function requireCoercibleToString(value){return toStr(requireObjectCoercible(value))}},{"require-object-coercible-x":8,"to-string-x":9}],8:[function(_dereq_,module,exports){/** * @file ES6-compliant shim for RequireObjectCoercible. * @see {@link http://www.ecma-international.org/ecma-262/6.0/#sec-requireobjectcoercible|7.2.1 RequireObjectCoercible ( argument )} * @version 1.4.1 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module require-object-coercible-x */ "use strict";var isNil=_dereq_("is-nil-x");module.exports=function RequireObjectCoercible(value){if(isNil(value))throw new TypeError("Cannot call method on "+value);return value}},{"is-nil-x":3}],9:[function(_dereq_,module,exports){/** * @file ES6-compliant shim for ToString. * @see {@link http://www.ecma-international.org/ecma-262/6.0/#sec-tostring|7.1.12 ToString ( argument )} * @version 1.4.2
* @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module to-string-x */ "use strict";var castString="".constructor,isSymbol=_dereq_("is-symbol");module.exports=function ToString(value){if(isSymbol(value))throw new TypeError("Cannot convert a Symbol value to a string");return castString(value)}},{"is-symbol":4}],10:[function(_dereq_,module,exports){/** * @file This method removes whitespace from the left end of a string. * @version 3.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module trim-left-x */ "use strict";var requireCoercibleToString=_dereq_("require-coercible-to-string-x"),Rx=_dereq_("cached-constructors-x").RegExp,reLeft2016=new Rx("^["+_dereq_("white-space-x").string2016+"]+"),reLeft2018=new Rx("^["+_dereq_("white-space-x").string2018+"]+"),replace="".replace,$trimLeft2018=function trimLeft2018(string){return replace.call(requireCoercibleToString(string),reLeft2018,"")};module.exports={trimLeft:$trimLeft2018,trimLeft2016:function trimLeft2016(string){return replace.call(requireCoercibleToString(string),reLeft2016,"")},trimLeft2018:$trimLeft2018}},{"cached-constructors-x":2,"require-coercible-to-string-x":7,"white-space-x":12}],11:[function(_dereq_,module,exports){"use strict";module.exports=function isUndefined(value){return void 0===value}},{}],12:[function(_dereq_,module,exports){/** * @file List of ECMAScript white space characters. 
* @version 3.0.0 * @author Xotic750 <[email protected]> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module white-space-x */ "use strict";for(var list=[{code:9,description:"Tab",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\t"},{code:10,description:"Line Feed",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\n"},{code:11,description:"Vertical Tab",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\x0B"},{code:12,description:"Form Feed",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\f"},{code:13,description:"Carriage Return",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\r"},{code:32,description:"Space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:" "},{code:160,description:"No-break space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\xa0"},{code:5760,description:"Ogham space mark",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u1680"},{code:6158,description:"Mongolian vowel separator",es5:!0,es2015:!0,es2016:!0,es2017:!1,es2018:!1,string:"\u180e"},{code:8192,description:"En quad",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2000"},{code:8193,description:"Em quad",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2001"},{code:8194,description:"En space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2002"},{code:8195,description:"Em space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2003"},{code:8196,description:"Three-per-em space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2004"},{code:8197,description:"Four-per-em space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2005"},{code:8198,description:"Six-per-em space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2006"},{code:8199,description:"Figure space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2007"},{code:8200,description:"Punctuation 
space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2008"},{code:8201,description:"Thin space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2009"},{code:8202,description:"Hair space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u200a"},{code:8232,description:"Line separator",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2028"},{code:8233,description:"Paragraph separator",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u2029"},{code:8239,description:"Narrow no-break space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u202f"},{code:8287,description:"Medium mathematical space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u205f"},{code:12288,description:"Ideographic space",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\u3000"},{code:65279,description:"Byte Order Mark",es5:!0,es2015:!0,es2016:!0,es2017:!0,es2018:!0,string:"\ufeff"}],stringES2016="",stringES2018="",length=list.length,i=0;i<length;i+=1)list[i].es2016&&(stringES2016+=list[i].string),list[i].es2018&&(stringES2018+=list[i].string);module.exports={list:list,string:stringES2018,string5:stringES2016,string2015:stringES2016,string2016:stringES2016,string2017:stringES2018,string2018:stringES2018}},{}]},{},[1])(1)});
* @author Xotic750 <[email protected]>
read_msp.py
from DeepEI.utils import ms2vec, get_cdk_fingerprints, get_cdk_descriptors
from matchms.importing import load_from_msp
import json
import os
import sys
import numpy as np
from scipy.sparse import csr_matrix, save_npz
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.Chem.rdMolDescriptors import CalcExactMolWt
# incompatible with DeepEI/utils.py
#from pycdk.pycdk import MolFromSmiles, parser_formula, MolToFormula
from concurrent.futures import ProcessPoolExecutor
from argparse import ArgumentParser

# Command-line script: read an MSP spectral library, featurize every molecule
# in parallel, and write the feature matrices into the destination directory.
p = ArgumentParser()
p.add_argument('--ncores', '-n', type=int, help='number of cores', default=1)
p.add_argument('--dest', '-d', type=str, help='destination directory', default='.')
p.add_argument('infile', type=str, help='input file')
args = p.parse_args()

file_msp = args.infile
ncores = args.ncores
dest = args.dest
if not os.path.isdir(dest):
    print(f"{dest} does not exist")
    # sys.exit instead of the builtin exit(): exit() is provided by `site`
    # and is not guaranteed in all interpreter environments.
    sys.exit(1)


def process_mol(nm):
    """Featurize one (index, spectrum) pair from the MSP file.

    Returns a dict with the canonical SMILES, name, peak vector, Morgan and
    CDK fingerprints, CDK descriptors and exact molecular weight, or an
    empty dict when the entry is skipped (MW > 2000 or any per-molecule
    failure).
    """
    n, m = nm
    osmiles = None  # bound before `try` so the except-handler can always log it
    try:
        osmiles = m.get('smiles')
        mol = Chem.MolFromSmiles(osmiles)
        name = m.get('name')
        peakindex = m.peaks.mz
        peakintensity = m.peaks.intensities
        molwt = CalcExactMolWt(mol)
        if molwt > 2000:
            return {}
        smiles = Chem.MolToSmiles(mol)
        # XXX: pycdk
        # elements = parser_formula(MolToFormula(MolFromSmiles(smiles)))
        # for e in elements:
        #     if e not in ['C', 'H', 'O', 'N', 'S', 'P', 'Si', 'F', 'Cl', 'Br', 'I']:
        #         print(f"{osmiles}: uncommon element {e}, skipping")
        #         return {}
        morgan_fp = np.array(AllChem.GetMorganFingerprintAsBitVect(mol, 2, nBits=4096))
        cdk_fp = get_cdk_fingerprints(smiles)
        cdk_des = np.array(get_cdk_descriptors(smiles))
        # XXX
        # ri = list(m['RI'].values())
        peak_vec = ms2vec(peakindex, peakintensity)
        print(f"{n}:{osmiles}: done")
        return {
            'smiles': smiles,
            'name': name,
            'peak_vec': peak_vec,
            # 'ri': ri,
            'morgan_fp': morgan_fp,
            'cdk_fp': cdk_fp,
            'cdk_des': cdk_des,
            'molwt': molwt,
        }
    except Exception as e:
        # Was `except BaseException`, which also swallowed KeyboardInterrupt
        # and SystemExit inside worker processes; keep the skip-and-log
        # behaviour for ordinary errors only.
        print(f"{osmiles}: {e}")
        return {}


print(f"Loading {file_msp}...")
all_mol = load_from_msp(file_msp)
print("done")

with ProcessPoolExecutor(max_workers=ncores) as pool:
    all_output = pool.map(process_mol, enumerate(all_mol))

# filter out empty entries (skipped/failed molecules)
all_output = list(filter(lambda x: x, all_output))

all_smiles = list(map(lambda x: x['smiles'], all_output))
Peak_data = np.array(list(map(lambda x: x['peak_vec'], all_output)))
# RI_data = map(lambda x: x['smiles'], all_output)
Morgan_fp = np.array(list(map(lambda x: x['morgan_fp'], all_output)))
CDK_fp = np.array(list(map(lambda x: x['cdk_fp'], all_output)))
CDK_des = np.array(list(map(lambda x: x['cdk_des'], all_output)))
MolWt = np.array(list(map(lambda x: x['molwt'], all_output)))

print("writing output ...")
os.chdir(dest)
# np.save('retention.npy', np.array(RI_data))
np.save('descriptor.npy', CDK_des)
np.save('molwt.npy', MolWt)
# Sparse storage: peak vectors and fingerprints are mostly zeros.
Peak_data = csr_matrix(Peak_data)
Morgan_fp = csr_matrix(Morgan_fp)
CDK_fp = csr_matrix(CDK_fp)
save_npz('peakvec.npz', Peak_data)
save_npz('morgan.npz', Morgan_fp)
save_npz('fingerprints.npz', CDK_fp)
with open('all_smiles.json', 'w') as t:
    json.dump(all_smiles, t)
print("done")
main_test.go
package rdb

import (
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// Test is the single entry point that hands the package's Ginkgo specs
// to the standard `go test` harness: Gomega failures are routed to
// Ginkgo's Fail handler, then the "rdb" suite is run.
func Test(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "rdb")
}
create_test.go
// TestHandleCreate exercises the user-creation HTTP handler: middleware
// guards, a forced database failure (expects HTTP 500), form validation
// (expects HTTP 422), and a browser-driven happy path.
//
// NOTE(review): this chunk is stored in displaced (prefix/suffix/middle)
// order. The final line `User: admin, Permissions: rbac.LegacyRealmAdmin,`
// belongs inside the `&database.Membership{ Realm: realm,` composite literal
// that ends the second line (the "internal_error" subtest), immediately
// before the `})` that begins the third line. Restore that order before
// building; do not edit the pieces independently.
//
// NOTE(review): the "creates" subtest drives the /realm/apikeys/new flow and
// asserts on an API key, not on a created user — it looks copied from the
// apikey tests; confirm intent against upstream.
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package user_test import ( "context" "net/http/httptest" "net/url" "strconv" "strings" "testing" "time" "github.com/chromedp/chromedp" "github.com/gorilla/sessions" "github.com/google/exposure-notifications-verification-server/internal/browser" "github.com/google/exposure-notifications-verification-server/internal/envstest" "github.com/google/exposure-notifications-verification-server/internal/project" "github.com/google/exposure-notifications-verification-server/pkg/controller" userpkg "github.com/google/exposure-notifications-verification-server/pkg/controller/user" "github.com/google/exposure-notifications-verification-server/pkg/database" "github.com/google/exposure-notifications-verification-server/pkg/rbac" "github.com/google/exposure-notifications-verification-server/pkg/render" ) func TestHandleCreate(t *testing.T) { t.Parallel() ctx := project.TestContext(t) harness := envstest.NewServer(t, testDatabaseInstance) realm, admin, session, err := harness.ProvisionAndLogin() if err != nil { t.Fatal(err) } cookie, err := harness.SessionCookie(session) if err != nil { t.Fatal(err) } t.Run("middleware", func(t *testing.T) { t.Parallel() h, err := render.New(ctx, envstest.ServerAssetsPath(), true) if err != nil { t.Fatal(err) } c := userpkg.New(harness.AuthProvider, harness.Cacher, harness.Database, h) handler := c.HandleCreate() envstest.ExerciseSessionMissing(t, handler) 
envstest.ExerciseMembershipMissing(t, handler) envstest.ExercisePermissionMissing(t, handler) }) t.Run("internal_error", func(t *testing.T) { t.Parallel() harness := envstest.NewServerConfig(t, testDatabaseInstance) harness.Database.SetRawDB(envstest.NewFailingDatabase()) h, err := render.New(ctx, envstest.ServerAssetsPath(), true) if err != nil { t.Fatal(err) } c := userpkg.New(harness.AuthProvider, harness.Cacher, harness.Database, h) handler := c.HandleCreate() ctx := ctx ctx = controller.WithSession(ctx, &sessions.Session{}) ctx = controller.WithMembership(ctx, &database.Membership{ Realm: realm,
}) r := httptest.NewRequest("POST", "/", strings.NewReader(url.Values{ "name": []string{"person"}, "email": []string{"[email protected]"}, "permissions": []string{"2", "4", "8"}, }.Encode())) r = r.Clone(ctx) r.Header.Set("Accept", "text/html") r.Header.Set("Content-Type", "application/x-www-form-urlencoded") w := httptest.NewRecorder() handler.ServeHTTP(w, r) w.Flush() if got, want := w.Code, 500; got != want { t.Errorf("expected %d to be %d", got, want) } if got, want := w.Body.String(), "Internal server error"; !strings.Contains(got, want) { t.Errorf("expected %s to contain %q", got, want) } }) t.Run("validation", func(t *testing.T) { t.Parallel() h, err := render.New(ctx, envstest.ServerAssetsPath(), true) if err != nil { t.Fatal(err) } c := userpkg.New(harness.AuthProvider, harness.Cacher, harness.Database, h) handler := c.HandleCreate() ctx := ctx ctx = controller.WithSession(ctx, &sessions.Session{}) ctx = controller.WithMembership(ctx, &database.Membership{ Realm: realm, User: admin, Permissions: rbac.LegacyRealmAdmin, }) r := httptest.NewRequest("POST", "/", strings.NewReader(url.Values{ "name": []string{""}, }.Encode())) r = r.Clone(ctx) r.Header.Set("Accept", "text/html") r.Header.Set("Content-Type", "application/x-www-form-urlencoded") w := httptest.NewRecorder() handler.ServeHTTP(w, r) w.Flush() if got, want := w.Code, 422; got != want { t.Errorf("expected %d to be %d", got, want) } if got, want := w.Body.String(), "cannot be blank"; !strings.Contains(got, want) { t.Errorf("expected %q to contain %q", got, want) } }) t.Run("creates", func(t *testing.T) { t.Parallel() browserCtx := browser.New(t) taskCtx, done := context.WithTimeout(browserCtx, 120*time.Second) defer done() var apiKey string if err := chromedp.Run(taskCtx, browser.SetCookie(cookie), chromedp.Navigate(`http://`+harness.Server.Addr()+`/realm/apikeys/new`), chromedp.WaitVisible(`body#apikeys-new`, chromedp.ByQuery), chromedp.SetValue(`input#name`, "Example API key", chromedp.ByQuery), 
chromedp.SetValue(`select#type`, strconv.Itoa(int(database.APIKeyTypeDevice)), chromedp.ByQuery), chromedp.Click(`#submit`, chromedp.ByQuery), chromedp.WaitVisible(`body#apikeys-show`, chromedp.ByQuery), chromedp.Value(`#apikey-value`, &apiKey, chromedp.ByQuery), ); err != nil { t.Fatal(err) } // Ensure API key is valid. record, err := harness.Database.FindAuthorizedAppByAPIKey(apiKey) if err != nil { t.Fatal(err) } if got, want := record.RealmID, realm.ID; got != want { t.Errorf("expected %v to be %v", got, want) } if got, want := record.Name, "Example API key"; got != want { t.Errorf("expected %v to be %v", got, want) } if got, want := record.APIKeyType, database.APIKeyTypeDevice; got != want { t.Errorf("expected %v to be %v", got, want) } }) }
User: admin, Permissions: rbac.LegacyRealmAdmin,
block_add_result.rs
// BlockAddResult — outcome of attempting to add a block to the chain:
// added to the tip (Ok), already known (BlockExists), parent unknown
// (OrphanBlock), or a reorganisation (ChainReorg with removed + added
// blocks). Includes predicate helpers, test assertion helpers, and a
// human-readable Display impl.
//
// NOTE(review): this chunk is stored in displaced (prefix/suffix/middle)
// order. The final line
//   `{ matches!(self, BlockAddResult::Ok(_) | BlockAddResult::ChainReorg { .. }) }`
// is the body of `was_chain_modified`, whose signature ends the second
// line. Restore that order before building; do not edit the pieces
// independently.
// Copyright 2021. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use std::{fmt, sync::Arc}; use tari_crypto::tari_utilities::hex::Hex; use crate::blocks::ChainBlock; #[derive(Clone, Debug, PartialEq)] pub enum BlockAddResult { Ok(Arc<ChainBlock>), BlockExists, OrphanBlock, /// Indicates the new block caused a chain reorg. This contains removed blocks followed by added blocks. 
ChainReorg { added: Vec<Arc<ChainBlock>>, removed: Vec<Arc<ChainBlock>>, }, } impl BlockAddResult { /// Returns true if the chain was changed (i.e block added or reorged), otherwise false pub fn was_chain_modified(&self) -> bool
pub fn is_added(&self) -> bool { matches!(self, BlockAddResult::Ok(_)) } pub fn is_chain_reorg(&self) -> bool { matches!(self, BlockAddResult::ChainReorg { .. }) } pub fn is_orphaned(&self) -> bool { matches!(self, BlockAddResult::OrphanBlock) } pub fn assert_added(&self) -> ChainBlock { match self { BlockAddResult::ChainReorg { added, removed } => panic!( "Expected added result, but was reorg ({} added, {} removed)", added.len(), removed.len() ), BlockAddResult::Ok(b) => b.as_ref().clone(), BlockAddResult::BlockExists => panic!("Expected added result, but was BlockExists"), BlockAddResult::OrphanBlock => panic!("Expected added result, but was OrphanBlock"), } } pub fn assert_orphaned(&self) { assert!(self.is_orphaned(), "Result was not orphaned"); } pub fn assert_reorg(&self, num_added: usize, num_removed: usize) { match self { BlockAddResult::ChainReorg { added, removed } => { assert_eq!(num_added, added.len(), "Number of added reorged blocks was different"); assert_eq!( num_removed, removed.len(), "Number of removed reorged blocks was different" ); }, BlockAddResult::Ok(_) => panic!("Expected reorg result, but was Ok()"), BlockAddResult::BlockExists => panic!("Expected reorg result, but was BlockExists"), BlockAddResult::OrphanBlock => panic!("Expected reorg result, but was OrphanBlock"), } } pub fn added_blocks(&self) -> Vec<Arc<ChainBlock>> { match self { Self::ChainReorg { added, removed: _ } => added.clone(), Self::Ok(added) => vec![added.clone()], _ => vec![], } } pub fn removed_blocks(&self) -> Vec<Arc<ChainBlock>> { match self { Self::ChainReorg { added: _, removed } => removed.clone(), _ => vec![], } } } impl fmt::Display for BlockAddResult { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { BlockAddResult::Ok(block) => { write!(f, "Block {} at height {} added", block.hash().to_hex(), block.height()) }, BlockAddResult::BlockExists => write!(f, "Block already exists"), BlockAddResult::OrphanBlock => write!(f, "Block added as 
orphan"), BlockAddResult::ChainReorg { added, removed } => write!( f, "Reorg from {} ({}) to {}, and {} blocks added ending with {} ({})", removed.first().map(|r| r.height()).unwrap_or(0), removed .first() .map(|r| r.hash().to_hex()) .unwrap_or_else(|| "None".to_string()), removed.last().map(|r| r.height()).unwrap_or(0), added.len(), added.last().map(|a| a.height()).unwrap_or(0), added .last() .map(|a| a.hash().to_hex()) .unwrap_or_else(|| "None".to_string()) ), } } }
{ matches!(self, BlockAddResult::Ok(_) | BlockAddResult::ChainReorg { .. }) }
boolean.go
package expression
import "gopkg.in/sqle/sqle.v0/sql" type Not struct { UnaryExpression } func NewNot(child sql.Expression) *Not { return &Not{UnaryExpression{child}} } func (e Not) Type() sql.Type { return sql.Boolean } func (e Not) Eval(row sql.Row) interface{} { return !e.Child.Eval(row).(bool) } func (e Not) Name() string { return "Not(" + e.Child.Name() + ")" } func (e *Not) TransformUp(f func(sql.Expression) sql.Expression) sql.Expression { c := e.UnaryExpression.Child.TransformUp(f) n := &Not{UnaryExpression{c}} return f(n) }
DialogInfo.js
// DialogInfo — a "HELP" icon that shows a tooltip listing the username and
// password rules for the sign-up dialog (styled-components + react-tooltip).
//
// NOTE(review): this chunk is stored in displaced (prefix/suffix/middle)
// order: the trailing `function` on the first line, the identifier
// `DialogInfo` on the last line, and the `() { return (...); }` body in
// between are one definition — `function DialogInfo() { ... }`. Restore
// that order before building; do not edit the pieces independently.
import React from "react"; import styled from "styled-components"; import ReactTooltip from "react-tooltip"; import { ReactComponent as BaseInfoIcon } from "assets/Icons/information.svg"; const InfoIconContainer = styled.div` display: flex; flex-direction: row; justify-content: flex-start; align-items: flex-end; position: absolute; top: 10px; left: 15px; height: 20px; color: #000000; cursor: pointer; &:hover { color: #ff8800; svg { fill: #ff8800; } } @media (min-width: 320px) and (max-width: 480px) { height: 16px; } `; const InfoIcon = styled(BaseInfoIcon)` display: flex; transition: fill 0.5s ease; width: 20px; height: 20px; @media (min-width: 320px) and (max-width: 480px) { width: 16px; height: 16px; } `; const InfoText = styled.div` font-size: 16px; transition: all 0.5s ease; margin-left: -4px; margin-bottom: -2px; @media (min-width: 320px) and (max-width: 480px) { font-size: 12px; margin-left: -2px; margin-bottom: 0; } `; const TooltipTitle = styled.div` font-size: 18px; @media (min-width: 320px) and (max-width: 480px) { font-size: 14px; } `; const TooltipText = styled.div` font-size: 14px; margin: 3px 0; @media (min-width: 320px) and (max-width: 480px) { font-size: 12px; } `; const LineDivisor = styled.div` width: 100%; height: 1px; background: #ffffff; margin: 8px 0; @media (min-width: 320px) and (max-width: 480px) { margin: 6px 0; } `; function
() { return ( <> <InfoIconContainer data-tip data-for="signUpToolTip"> <InfoIcon /> <InfoText>HELP</InfoText> </InfoIconContainer> <ReactTooltip id="signUpToolTip" className="signup-tooltip" backgroundColor="#ff8800" place="right" effect="solid" > <TooltipTitle>Username Condition</TooltipTitle> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Less than 4 characters </TooltipText> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Except alphabets and numbers </TooltipText> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Special characters except @/./+/-/_ </TooltipText> <LineDivisor /> <TooltipTitle>Password Condition</TooltipTitle> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Common passwords </TooltipText> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Similar to username </TooltipText> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Less than 8 characters </TooltipText> <TooltipText> <span role="img" aria-label="warning-icon"> ❌ </span>{" "} Consist only of numbers </TooltipText> </ReactTooltip> </> ); } export default DialogInfo;
DialogInfo
bezier2.go
package geometry // A bezier of degree 3 is defined as // P(t) = P0*(1-t)^3 + 3*P1*t*(1-t)^2 + 3*P2*t^2*(1-t) + P3*t^3 type Curve3 [4]Vector func (c Curve3) GetPoint(t float64) Vector { it := 1 - t it2 := it * it
y := c[0].Y*it3 + 3*c[1].Y*t*it2 + 3*c[2].Y*t2*it + c[3].Y*t3 return Vector{x, y} }
it3 := it2 * it t2 := t * t t3 := t2 * t x := c[0].X*it3 + 3*c[1].X*t*it2 + 3*c[2].X*t2*it + c[3].X*t3
pcache.py
# pcache — read/write point caches (Unity VFX Graph ".pcache" format) to and
# from Houdini geometry, mapping Houdini point attributes (P, N, v, Cd, ...)
# to pcache property names via `defaultBindings`.
#
# NOTE(review): this chunk is stored in displaced (prefix/suffix/middle)
# order — the last line (building `bindings`/`retained_attribs` and packing
# point attributes with `struct`) is the body of `setDataFromGeometry`,
# whose `def` line ends the first line. Restore that order before running.
#
# NOTE(review): bug in `loadFromFile` — `file = open(filename, "rb")` is
# followed by `with open(filename, "rb") as file:`; the first handle is
# never closed (resource leak). The stray first `open` should be deleted.
#
# NOTE(review): Python 2 code (`print` statements, `xrange`) — runs only
# under Python-2-based hython; confirm the target Houdini version.
#
# NOTE(review): the class-level mutable attributes (propertyNames, etc.)
# are shadowed by instance attributes in clear(), so sharing is avoided in
# practice, but instance-level initialization only would be safer.
import os import sys import hou import struct class pcache(object): fileName = "" fileType = 'a' fileVersion = 1.0 propertyNames = [] propertyTypes = [] propertyData = bytearray() itemcount = 0 itemstride = 0 defaultBindings = { 'P': 'position', 'N': 'normal', 'v': 'velocity', 'Cd': 'color', 'Alpha': 'alpha', 'uv': 'texCoord', 'age': 'age', 'life': 'lifetime' } components = ['x', 'y', 'z', 'w'] def __init__(self, filename=None): self.clear() if filename is not None: # read file self.loadFromFile(filename) def clear(self): self.fileName = "" self.fileType = 'a' self.fileVersion = 1.0 self.propertyNames = [] self.propertyTypes = [] self.propertyData = bytearray() self.itemcount = 0 self.itemstride = 0 def setDataFromGeometry(self, geo, export_attribs, property_names=None): # sets data into geometry
def __getComponentCountFromName(self, name): retval = 1 for i in xrange(len(self.components)): if name.endswith(".{}".format(self.components[i])): return i+1 return retval def __isVectorComponent(self, name): for i in xrange(len(self.components)): if name.endswith(".{}".format(self.components[i])): return True return False def __componentIndexOf(self, name): retval = -1 for i in xrange(len(self.components)): if name.endswith(".{}".format(self.components[i])): return i return retval def __getNameWithoutComponent(self, name): retval = name for i in xrange(len(self.components)): if name.endswith(".{}".format(self.components[i])): return name.replace(".{}".format(self.components[i]),"") return retval def __reverseBinding(self, propertyName): for key in self.defaultBindings: if propertyName == self.defaultBindings[key]: return key return None def createGeo(self, geo, useRecommendedNames=True): # sets data into geometry if not isinstance(geo, hou.Geometry): raise hou.Error("Input is not not a valid Houdini Geometry") geo.clear() # deduce attributes to create from properties attribs_to_create = {} attribs_types_to_create = {} attribs_property = {} for i in xrange(len(self.propertyNames)): name = self.propertyNames[i] type = self.propertyTypes[i] comp = self.__getNameWithoutComponent(name) if useRecommendedNames: attrib_name = self.__reverseBinding(comp) if attrib_name is not None: attribs_property[attrib_name] = comp comp = attrib_name else: attribs_property[comp] = comp complen = self.__getComponentCountFromName(name) attribs_types_to_create[comp] = type; if comp in attribs_to_create: attribs_to_create[comp] = max(attribs_to_create[comp], complen) else: attribs_to_create[comp] = complen # Attrib Creation, item structure for comp in attribs_to_create: if geo.findPointAttrib(comp) is None: print "{} point attribute not found: creating...".format(comp) if attribs_types_to_create[comp] == "float": default_val = 0.0 elif attribs_types_to_create[comp] == "int": default_val = 0 
else: default_val = None if attribs_to_create[comp] == 1: geo.addAttrib(hou.attribType.Point, comp, default_val) else: default_vec = list() for i in xrange(attribs_to_create[comp]): default_vec.append(default_val) geo.addAttrib(hou.attribType.Point, comp, default_vec) # Data Storage for i in xrange(self.itemcount): pt = geo.createPoint() # get bytes item_data = self.propertyData[i * self.itemstride: (i * self.itemstride) + self.itemstride] attrib_data = {} # fill in data index = 0 for j in xrange(len(self.propertyNames)): # get actual value if self.propertyTypes[j] == "float": val = struct.unpack("f", item_data[index:index+4]) index += 4 # print "Unpack Float ({}) : {}".format(self.propertyNames[j], val[0]) elif self.propertyTypes[j] == "int": val = struct.unpack("i", item_data[index:index+4]) index += 4 # print "Unpack Integer ({}) : {}".format(self.propertyNames[j], val[0]) else: val = None if self.__isVectorComponent(self.propertyNames[j]): # for vector stuff key = self.__reverseBinding(self.__getNameWithoutComponent(self.propertyNames[j])) idx = self.__componentIndexOf(self.propertyNames[j]) if key not in attrib_data: attrib_data[key] = [0.0, 0.0, 0.0] attrib_data[key][idx] = val[0] else: # 1-component data key = self.__reverseBinding(self.__getNameWithoutComponent(self.propertyNames[j])) attrib_data[key] = val[0] # print attrib_data for attrib in attrib_data: pt.setAttribValue(attrib, attrib_data[attrib]) def loadFromFile(self, filename): file = open(filename, "rb") with open(filename, "rb") as file: magic = file.readline() if magic != "pcache\n": raise hou.Error("Invalid file header: expected pcache magic number : {}".format(magic)) self.clear() done = False while not done: with hou.InterruptableOperation("Loading PCACHE Header", open_interrupt_dialog=False) as operation: line = file.readline().replace("\n","") words = line.split(" ") kw = words[0] if kw == "end_header": done = True elif kw == "format": if words[1] == "ascii": self.fileType = 'a' elif 
words[1] == "binary": self.fileType = 'b' else: raise hou.Error("Invalid format: {}".format(words[1])) elif kw == "elements": count = int(words[1]) self.itemcount = count elif kw == "property": if len(words) != 3: raise hou.Error("Invalid property description: {}".format(words)) if words[1] == "float": self.propertyTypes.append("float") self.propertyNames.append(words[2]) self.itemstride += 4 elif words[1] == "int": self.propertyTypes.append("int") self.propertyNames.append(words[2]) self.itemstride += 4 elif kw == "comment": print ' '.join(words).replace("comment ", "") self.propertyData = bytearray(file.read()) print "Item Stride is {} bytes".format(self.itemstride) length = len(self.propertyData) self.itemcount = length/self.itemstride print "Found {} bytes of data, corresponding to {} items".format(length, self.itemcount) def saveAsFile(self, filename): # save data file = open(filename, "wb") file.write("pcache\n") # header magic number file.write("comment PCACHE file Exported from Houdini\n") # ------------------- file.write("format binary 1.0\n") # version and format file.write("elements {}\n".format(self.itemcount)) # item count for i in xrange(len(self.propertyNames)): # every property file.write("property {} {}\n".format(self.propertyTypes[i], self.propertyNames[i])) file.write("end_header\n") # end of header # data file.write(self.propertyData) file.close()
if not isinstance(geo, hou.Geometry): raise hou.Error("Input is not not a valid Houdini Geometry") self.clear() bindings = {} attribs = export_attribs.split(' ') if property_names is None: # use default corresponding table bindings = self.defaultBindings else: propnames = property_names.split(' ') for i in xrange(len(attribs)): bindings[attribs[i]] = propnames[i] retained_attribs = [] for attrib in attribs: geo_attr = geo.findPointAttrib(attrib) if geo_attr is not None: data_type = geo_attr.dataType() if data_type == hou.attribData.Int: str_type = 'int' elif data_type == hou.attribData.Float: str_type = 'float' components = geo_attr.size() retained_attribs.append(geo_attr) if components == 1: # float self.propertyNames.append(bindings[attrib]) self.propertyTypes.append(str_type) self.itemstride += 4 elif components <= 4: # vector for i in xrange(components): self.propertyNames.append(bindings[attrib] + ".{}".format(self.components[i])) self.propertyTypes.append(str_type) self.itemstride += 4 else: raise hou.NodeWarning("Point attribute not found : {}".format(attrib)) print("------- {} PROPERTIES --------".format(len(self.propertyNames))) for i in xrange(len(self.propertyNames)): print("Property : {} ({})".format(self.propertyNames[i], self.propertyTypes[i])) points = geo.points() numpt = len(points) self.itemcount = numpt for point in points: for i in xrange(len(retained_attribs)): attr = retained_attribs[i] val = point.attribValue(attr) if self.propertyTypes[i] == "float": t = 'f' elif self.propertyTypes[i] == "int": t = 'i' if attr.size() > 1: for comp in val: pack = struct.pack(t, comp) for byte in pack: self.propertyData.append(byte) else: pack = struct.pack(t, val) for byte in pack: self.propertyData.append(byte)
data_table_spec.ts
// Angular 2 Material data-table spec: verifies row/master checkbox
// selection behaviour, the selectable_change event, and the private
// listener-management helpers of MdDataTable.
//
// NOTE(review): this chunk is stored in displaced (prefix/suffix/middle)
// order. The final line's leading `}` closes `class TestComponent` (which
// ends the first line with `selected: Array<any> = [];`), and the
// `componentSanityCheck(...)` call on that same final line belongs between
// the class declaration and the `describe('Data table', ...)` block.
// Restore that order before building.
import {componentSanityCheck} from "../../platform/testing/util"; import {inject, async} from "@angular/core/testing"; import {ComponentFixture, TestComponentBuilder} from "@angular/core/testing"; import {Component, DebugElement, EventEmitter, QueryList} from "@angular/core"; import {CORE_DIRECTIVES} from "@angular/common"; import {By} from "@angular/platform-browser"; import {MdDataTableHeaderSelectableRow, MdDataTable, MdDataTableSelectableRow} from "./index"; export function main() { interface IDataTableFixture { fixture: ComponentFixture<TestComponent>; comp: MdDataTable; debug: DebugElement; } @Component({ selector: 'test-app', directives: [CORE_DIRECTIVES, MdDataTable, MdDataTableHeaderSelectableRow, MdDataTableSelectableRow], template: `<md-data-table [selectable]="true"> <thead> <tr md-data-table-header-selectable-row> <th>Unit price</th> </tr> </thead> <tbody> <tr md-data-table-selectable-row> <td>$2.90</td> </tr> <tr md-data-table-selectable-row> <td>$1.25</td> </tr> </tbody> </md-data-table>` }) class TestComponent { selected: Array<any> = [];
describe('Data table', () => { let builder: TestComponentBuilder; function setup(): Promise<IDataTableFixture> { return builder.createAsync(TestComponent).then((fixture: ComponentFixture<TestComponent>) => { let debug = fixture.debugElement.query(By.css('md-data-table')); let comp: MdDataTable = debug.componentInstance; let testComp = fixture.debugElement.componentInstance; testComp.selected = []; fixture.detectChanges(); return { fixture: fixture, comp: comp, debug: debug }; }).catch(console.error.bind(console)); } beforeEach(inject([TestComponentBuilder], (tcb) => { builder = tcb; })); describe('md-data-table', () => { it('should initialize selected', () => { return setup().then((api: IDataTableFixture) => { expect(api.comp.selected.length).toEqual(0); api.fixture.destroy(); }); }); it('should toggle checked value when a click is fired on a row checkbox', () => { return setup().then((api: IDataTableFixture) => { let row = api.debug.query(By.css('tbody tr:first-child')); row.nativeElement.click(); expect(api.comp.selected.length).toEqual(1); expect(api.comp.selected[0]).toEqual('0'); row.nativeElement.click(); expect(api.comp.selected.length).toEqual(0); api.fixture.destroy(); }); }); it('should check all row checkbox when a click is fired on master checkbox', () => { return setup().then((api: IDataTableFixture) => { let masterRow = api.debug.query(By.css('thead tr:first-child')); masterRow.nativeElement.click(); expect(api.comp.selected.length).toEqual(2); expect(api.comp.selected[0]).toEqual('0'); masterRow.nativeElement.click(); expect(api.comp.selected.length).toEqual(0); api.fixture.destroy(); }); }); it('should uncheck master checkbox if a row checkbox is unchecked', () => { return setup().then((api: IDataTableFixture) => { let masterRow = api.debug.query(By.css('thead tr:first-child')), row = api.debug.query(By.css('tbody tr:first-child')).nativeElement; masterRow.nativeElement.click(); expect(masterRow.componentInstance.isActive).toBe(true); row.click(); 
expect(api.comp.selected.length).toEqual(1); expect(api.comp.selected[0]).toEqual('1'); expect(masterRow.componentInstance.isActive).toBe(false); api.fixture.destroy(); }); }); it('should fire a selectable_change event when a row checkbox change', () => { return setup().then((api: IDataTableFixture) => { let row = api.debug.query(By.css('tbody tr:first-child')).nativeElement; api.comp.onSelectableAll.subscribe((event) => { expect(event.name).toBe('selectable_change'); }); row.click(); api.fixture.destroy(); }); }); }); describe('_unsubscribeChildren', () => { it('should reset the selected values', () => { return setup().then((api: IDataTableFixture) => { api.comp.selected = ['1', '2']; api.comp._unsubscribeChildren(); expect(api.comp.selected.length).toEqual(0); }); }); it('should unsubscribe to listener', () => { return setup().then((api: IDataTableFixture) => { let emitter = new EventEmitter(false), spy = jasmine.createSpy('spy'); emitter.subscribe(spy); api.comp._listeners = [emitter]; emitter.emit({name: 'custom_event'}); api.comp._unsubscribeChildren() expect(() => { emitter.emit({name: 'custom_event2'}) }).toThrow(); expect(spy.calls.count()).toEqual(1); }); }); }); describe('_updateChildrenListener', () => { it('should ask unsubscription', () => { return setup().then((api: IDataTableFixture) => { spyOn(api.comp, '_unsubscribeChildren'); api.comp._updateChildrenListener(api.comp._rows); expect(api.comp._unsubscribeChildren).toHaveBeenCalled(); }); }); }); }); }
} componentSanityCheck('Data table', 'md-data-table', `<md-data-table></md-data-table>`);
compact.rs
// NOTE(review): two doc-comment corrections applied below — "implementaton"
// -> "implementation", and the RFC reference "RFC 1715" -> "RFC 7515"
// (the linked URL already points at rfc7515; RFC 1715 is unrelated).
//
// NOTE(review): the trailing `/// This function splits a byte slice into
// these three parts.` + `pub fn` at the very end of this chunk is the
// truncated start of `split_encoded_parts`; its signature and body
// continue beyond this chunk — do not treat this span as a complete file.
//! JWS Compact Serialization implementation. //! //! This module contains types and functions to encode/decode //! and sign/verify messages encoded with the JWS Compact Serialization Scheme //! as defined in [RFC 7515 section 7.1](https://tools.ietf.org/html/rfc7515#section-7.1). //! //! Most applications should use [`encode_sign`](fn.encode_sign.html) and [`decode_verify`](fn.decode_verify.html). //! These functions combine encoding and signing or decoding and verifying in a single step. use std::collections::BTreeMap; use crate::{ Error, JsonObject, JsonValue, Result, Signer, Verifier, }; /// Encode a message using the JWS Compact Serialization scheme. /// /// Note that the signer should already have added it's parameters to the header. /// If added later, they will not be part of the encoded message. /// /// See [`encode_sign`] for an easier way to make sure the message is encoded with the right header parameters added. pub fn encode(header: &JsonObject, payload: &[u8]) -> EncodedMessage { // Serializing header can't fail since it's already a JSON object. let header_json = serde_json::to_vec(&header).unwrap(); let output_len = base64_len(header_json.len()) + base64_len(payload.len()) + 1; let mut buffer = String::with_capacity(output_len); base64::encode_config_buf(&header_json, base64::URL_SAFE_NO_PAD, &mut buffer); let header_length = buffer.len(); buffer.push('.'); base64::encode_config_buf(&payload, base64::URL_SAFE_NO_PAD, &mut buffer); EncodedMessage{data: buffer, header_length} } /// Encode and sign the message. /// /// This function will first use to [`crate::Signer`] to add header parameters to the header, /// then encode the message and finally sign it. /// /// Using this function ensures that the header parameters are set correctly before encoding/signing. pub fn encode_sign(header: JsonObject, payload: &[u8], signer: &impl Signer) -> Result<EncodedSignedMessage> { let mut header = header; // Let the signer set the headers before encoding the message. 
signer.set_header_params(&mut header); let encoded = encode(&header, payload); // Sign the encoded message. let signature = signer.compute_mac(encoded.header().as_bytes(), encoded.payload().as_bytes())?; // Concat the signature to the encoded message. let header_length = encoded.header().len(); let payload_length = encoded.payload().len(); let mut data = encoded.into_data(); data.reserve(base64_len(signature.len()) + 1); data.push('.'); base64::encode_config_buf(&signature, base64::URL_SAFE_NO_PAD, &mut data); Ok(EncodedSignedMessage{data, header_length, payload_length}) } /// Decode a JWS Compact Serialization message with signature from a byte slice. /// /// # Safety /// This function has no memory safety implications, /// It was marked as unsafe because does not perform signature verification. /// It is now deprecated and has been replaced by [`decode_unverified`]. /// /// However, in most applications, you will want to use [`decode_verify`], /// which does verify the message signature. #[deprecated = "this function was marked unsafe but has no safety implications, use decode_unverified instead"] pub unsafe fn decode(data: &[u8]) -> Result<(DecodedMessage, Vec<u8>)> { decode_unverified(data) } /// Decode a JWS Compact Serialization message with signature from a byte slice. /// /// The signature is not verified. /// You can use [`decode_verify`] to decode the message and verify the signature. pub fn decode_unverified(data: &[u8]) -> Result<(DecodedMessage, Vec<u8>)> { split_encoded_parts(data)?.decode() } /// Decode and verify a JWS Compact Serialization message. /// /// Note that if verification fails, you will not have access to the decoded message. /// If that is required, you may use [`split_encoded_parts`] and decode/verify the message manually. 
pub fn decode_verify(data: &[u8], verifier: &impl Verifier) -> Result<DecodedMessage> { let parts = split_encoded_parts(data)?; let (message, signature) = parts.decode()?; verifier.verify(Some(&message.header), None, parts.header, parts.payload, &signature)?; Ok(message) } /// A compact JWS message with header and payload, but without signature. /// /// The signature is left off because the signature can only be computed from (and verified for) a serialized message, /// whereas this struct represents a mostly decoded message (the payload is still raw bytes). /// /// You can call [`decode_verify`] to decode and verify a message. /// Alternatively, you can call [`split_encoded_parts`], decode the parts and then use a [`Verifier`] manually. /// The latter allows you to access the decoded message, even if it's signature is invalid. #[derive(Clone, Debug, PartialEq)] pub struct DecodedMessage { pub header : JsonObject, pub payload : Vec<u8>, } /// An encoded JWS Compact Serialization message without signature. #[derive(Clone, Debug, Eq, PartialEq)] pub struct EncodedMessage { data : String, header_length : usize, } /// An encoded JWS Compact Serialization message with signature. #[derive(Clone, Debug, Eq, PartialEq)] pub struct EncodedSignedMessage { data : String, header_length : usize, payload_length : usize, } impl DecodedMessage { /// Create a new message from a header and a payload. pub fn new(header: impl Into<JsonObject>, payload: impl Into<Vec<u8>>) -> Self { Self{header: header.into(), payload: payload.into()} } /// Create a new DecodedMessage by decoding the header and payload of a JWS Compact Serialization message. pub fn from_encoded_parts(header: &[u8], payload: &[u8]) -> Result<Self> { // Undo base64 encoding of parts. let header = decode_base64_url(header, "header")?; let payload = decode_base64_url(payload, "payload")?; // Decode the header as JSON. 
let header: BTreeMap<String, JsonValue> = decode_json(&header, "header")?; // Put the decoded parts back together. Ok(Self{header, payload}) } /// Parse the payload as JSON using serde. /// /// The type must implement the [`serde::Deserialize`] trait pub fn parse_json<'de, T: serde::de::Deserialize<'de> + 'de>(&'de self) -> std::result::Result<T, serde_json::Error> { serde_json::from_slice(&self.payload) } /// Parse the payload as a [`JsonValue`]. /// /// This method avoids the need for type annotations. pub fn parse_json_value(&self) -> std::result::Result<JsonValue, serde_json::Error> { self.parse_json() } /// Parse the payload as a [`JsonObject`]. /// /// This method avoids the need for type annotations. pub fn parse_json_object(&self) -> std::result::Result<JsonObject, serde_json::Error> { self.parse_json() } } impl EncodedMessage { /// Get a reference to the raw data. pub fn data(&self) -> &str { &self.data } /// Get the raw data, consuming the encoded message. pub fn into_data(self) -> String { self.data } /// Get a reference to the raw data as bytes. pub fn as_bytes(&self) -> &[u8] { self.data().as_bytes() } /// Get the header part of the encoded message. pub fn header(&self) -> &str { &self.data[..self.header_length] } /// Get the payload part of the encoded message. pub fn payload(&self) -> &str { &self.data[self.header_length + 1..] } } impl EncodedSignedMessage { /// Get a reference to the raw data. pub fn data(&self) -> &str { &self.data } /// Get the raw data, consuming the encoded message. pub fn into_data(self) -> String { self.data } /// Get a reference to the raw data as bytes. pub fn as_bytes(&self) -> &[u8] { self.data().as_bytes() } /// Get the header part of the encoded message. pub fn header(&self) -> &str { &self.data[..self.header_length] } /// Get the payload part of the encoded message. pub fn payload(&self) -> &str { &self.data[self.payload_start()..self.payload_end()] } /// Get the signature part of the encoded message. 
pub fn signature(&self) -> &str { &self.data[self.signature_start()..] } /// Get the parts of the message as a [`CompactSerializedParts`] struct. pub fn parts(&self) -> CompactSerializedParts { CompactSerializedParts { header: self.header().as_bytes(), payload: self.payload().as_bytes(), signature: self.signature().as_bytes(), } } fn payload_start(&self) -> usize { self.header_length + 1 } fn payload_end(&self) -> usize { self.payload_start() + self.payload_length } fn signature_start(&self) -> usize { self.payload_end() + 1 } } /// The individual (still encoded) parts of a JWS Compact Serialized message. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct CompactSerializedParts<'a> { pub header: &'a [u8], pub payload: &'a [u8], pub signature: &'a [u8], } impl<'a> CompactSerializedParts<'a> { /// Decode the already-split parts of a JWS Compact Serialization message. pub fn decode(&self) -> Result<(DecodedMessage, Vec<u8>)> { let message = DecodedMessage::from_encoded_parts(self.header, self.payload)?; let signature = decode_base64_url(self.signature, "signature")?; Ok((message, signature)) } } /// Split the parts of a JWS Compact Serialization message. /// /// A JWS Compact Serialization message contains three base64-url encoded parts separated by period '.' characters: /// - header /// - payload /// - signature /// /// This function splits a byte slice into these three parts. pub fn
(data: &[u8]) -> Result<CompactSerializedParts> { // Split data into parts. let mut parts = data.splitn(4, |&c| c == b'.'); let header = parts.next().ok_or_else(|| Error::invalid_message("encoded message does not contain a header"))?; let payload = parts.next().ok_or_else(|| Error::invalid_message("encoded message does not contain a payload"))?; let signature = parts.next().ok_or_else(|| Error::invalid_message("encoded message does not contain a signature"))?; // Make sure there are no additional message parts in the input. if parts.next().is_some() { return Err(Error::invalid_message("encoded message contains an additional field after the signature")); } Ok(CompactSerializedParts{header, payload, signature}) } /// Compute the length of a base64 encoded string without padding, given the input length. fn base64_len(input_len: usize) -> usize { // Multiply by 4, divide by 3 rounding up. (input_len * 4 + 2) / 3 } /// Decode a base64-url encoded string. fn decode_base64_url(value: &[u8], field_name: &str) -> Result<Vec<u8>> { match base64::decode_config(value, base64::URL_SAFE_NO_PAD) { Ok(x) => Ok(x), Err(_) => Err(Error::invalid_message(format!("invalid base64 in {}", field_name))) } } /// Decode a JSON string. fn decode_json<'a, T: serde::Deserialize<'a>>(value: &'a [u8], field_name: &str) -> Result<T> { match serde_json::from_slice(value) { Ok(x) => Ok(x), Err(_) => Err(Error::invalid_message(format!("invalid JSON in {}", field_name))) } } #[cfg(test)] mod test { use super::*; use crate::json_object; use assert2::assert; fn test_split_valid(source: &[u8], header: &[u8], payload: &[u8], signature: &[u8]) { let parts = split_encoded_parts(source).unwrap(); assert!(parts.header == header); assert!(parts.payload == payload); assert!(parts.signature == signature); } #[test] fn test_split_encoded_parts() { // Test splitting some valid sequences. 
test_split_valid(b"..", b"", b"", b""); test_split_valid(b"..mies", b"", b"", b"mies"); test_split_valid(b".noot.", b"", b"noot", b""); test_split_valid(b".noot.mies", b"", b"noot", b"mies"); test_split_valid(b"aap..", b"aap", b"", b""); test_split_valid(b"aap..mies", b"aap", b"", b"mies"); test_split_valid(b"aap.noot.", b"aap", b"noot", b""); test_split_valid(b"aap.noot.mies", b"aap", b"noot", b"mies"); // Test splitting some invalid sequences. assert!(let Err(Error { kind: Error::InvalidMessage, .. }) = split_encoded_parts(b"aapnootmies")); assert!(let Err(Error { kind: Error::InvalidMessage, .. }) = split_encoded_parts(b"aap.nootmies")); assert!(let Err(Error { kind: Error::InvalidMessage, .. }) = split_encoded_parts(b"aap.noot.mies.")); } // Example taken from RFC 7515 appendix A.1 // https://tools.ietf.org/html/rfc7515#appendix-A.1 // // Header: // {"typ":"JWT", // "alg":"HS256"} // // Payload: // {"iss":"joe", // "exp":1300819380, // "http://example.com/is_root":true} // // Key: AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow // // Signature: dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk const RFC7515_A1_ENCODED : &[u8] = b"eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ.dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; const RFC7515_A1_ENCODED_MANGLED : &[u8] = b"eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJqc2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ.dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; const RFC7515_A1_SIGNATURE : &[u8] = &[116, 24, 223, 180, 151, 153, 224, 37, 79, 250, 96, 125, 216, 173, 187, 186, 22, 212, 37, 77, 105, 214, 191, 240, 91, 88, 5, 88, 83, 132, 141, 121]; #[test] fn test_decode() { let (message, signature) = split_encoded_parts(RFC7515_A1_ENCODED).unwrap().decode().unwrap(); assert!(&message.header == &json_object!{ "alg": "HS256", "typ": "JWT" }); assert!(let Ok(_) = 
message.parse_json_object()); assert!(message.parse_json_object().ok() == Some(json_object!{ "iss": "joe", "exp": 1300819380, "http://example.com/is_root": true, })); assert!(&signature[..] == RFC7515_A1_SIGNATURE); } #[test] fn test_decode_mangled() { let (message, signature) = split_encoded_parts(RFC7515_A1_ENCODED_MANGLED).unwrap().decode().unwrap(); assert!(&message.header == &json_object!{ "alg": "HS256", "typ": "JWT", }); assert!(message.parse_json_object().unwrap() == json_object!{ "iss": "jse", "exp": 1300819380, "http://example.com/is_root": true, }); assert!(&signature[..] == RFC7515_A1_SIGNATURE); } #[test] fn test_encode() { let header = json_object!{"typ": "JWT", "alg": "HS256"}; let encoded = encode(&header, b"foo"); assert!(encoded.header() == "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"); assert!(encoded.payload() == "Zm9v"); assert!(encoded.data() == "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.Zm9v") } }
split_encoded_parts
_application_gateways_operations_async.py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class ApplicationGatewaysOperations: """ApplicationGatewaysOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2019_06_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def _delete_initial( self, resource_group_name: str, application_gateway_name: str, **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def begin_delete( self, resource_group_name: str, application_gateway_name: str, **kwargs ) -> None: """Deletes the 
specified application gateway. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param application_gateway_name: The name of the application gateway. :type application_gateway_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: None, or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._delete_initial( resource_group_name=resource_group_name, application_gateway_name=application_gateway_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def get( self, resource_group_name: str, application_gateway_name: str, **kwargs ) -> "models.ApplicationGateway": """Gets the specified application gateway. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param application_gateway_name: The name of the application gateway. :type application_gateway_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ApplicationGateway, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGateway :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('ApplicationGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def _create_or_update_initial( self, resource_group_name: str, application_gateway_name: str, parameters: "models.ApplicationGateway", **kwargs ) -> "models.ApplicationGateway": cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'ApplicationGateway') body_content_kwargs['content'] = body_content 
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('ApplicationGateway', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('ApplicationGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def begin_create_or_update( self, resource_group_name: str, application_gateway_name: str, parameters: "models.ApplicationGateway", **kwargs ) -> "models.ApplicationGateway": """Creates or updates the specified application gateway. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param application_gateway_name: The name of the application gateway. :type application_gateway_name: str :param parameters: Parameters supplied to the create or update application gateway operation. :type parameters: ~azure.mgmt.network.v2019_06_01.models.ApplicationGateway :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: ApplicationGateway, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGateway :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, application_gateway_name=application_gateway_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('ApplicationGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def 
_update_tags_initial( self, resource_group_name: str, application_gateway_name: str, parameters: "models.TagsObject", **kwargs ) -> "models.ApplicationGateway": cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" content_type = kwargs.pop("content_type", "application/json") # Construct URL url = self._update_tags_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = 'application/json' # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'TagsObject') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('ApplicationGateway', pipeline_response) if cls: return cls(pipeline_response, 
deserialized, {}) return deserialized _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore async def begin_update_tags( self, resource_group_name: str, application_gateway_name: str, parameters: "models.TagsObject", **kwargs ) -> "models.ApplicationGateway": """Updates the specified application gateway tags. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param application_gateway_name: The name of the application gateway. :type application_gateway_name: str :param parameters: Parameters supplied to update application gateway tags. :type parameters: ~azure.mgmt.network.v2019_06_01.models.TagsObject :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: ApplicationGateway, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGateway :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._update_tags_initial( resource_group_name=resource_group_name, application_gateway_name=application_gateway_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('ApplicationGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore def list( self, resource_group_name: str, **kwargs ) -> AsyncIterable["models.ApplicationGatewayListResult"]: """Lists all application gateways in a resource group. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayListResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') else: url = next_link query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' # Construct and send request request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways'} # type: ignore def list_all( self, **kwargs ) -> AsyncIterable["models.ApplicationGatewayListResult"]: """Gets all the application gateways in a subscription. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayListResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_all.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') else: url = next_link query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' # Construct and send request request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('ApplicationGatewayListResult', 
            # NOTE(review): this chunk opens mid-way through list_all's
            # extract_data helper; the opening of list_all (and of the
            # _deserialize call closed below) precedes this view.
                pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        # Fetch one page and validate the HTTP status before handing the raw
        # pipeline response to extract_data.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways'}  # type: ignore

    async def _start_initial(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        **kwargs
    ) -> None:
        # Sends the raw POST that kicks off the "start gateway" long-running
        # operation; polling to completion is handled by begin_start.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self._start_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = completed synchronously, 202 = accepted (poll for completion).
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'}  # type: ignore

    async def begin_start(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        **kwargs
    ) -> None:
        """Starts the specified application gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: None, or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._start_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                # identity cls: keep the raw pipeline response for the poller
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'}  # type: ignore

    async def _stop_initial(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        **kwargs
    ) -> None:
        # Sends the raw POST that kicks off the "stop gateway" long-running
        # operation; polling to completion is handled by begin_stop.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self._stop_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'}  # type: ignore

    async def begin_stop(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        **kwargs
    ) -> None:
        """Stops the specified application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: None, or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._stop_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                # identity cls: keep the raw pipeline response for the poller
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'}  # type: ignore

    async def _backend_health_initial(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        expand: Optional[str] = None,
        **kwargs
    ) -> "models.ApplicationGatewayBackendHealth":
        # Sends the raw POST for the backend-health long-running operation;
        # begin_backend_health drives the polling.
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayBackendHealth"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self._backend_health_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Only a 200 carries a body; a 202 means the operation is still running.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _backend_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'}  # type: ignore

    async def begin_backend_health(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        expand: Optional[str] = None,
        **kwargs
    ) -> "models.ApplicationGatewayBackendHealth":
        """Gets the backend health of the specified application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: ApplicationGatewayBackendHealth, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayBackendHealth
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayBackendHealth"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._backend_health_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                expand=expand,
                # identity cls: keep the raw pipeline response for the poller
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_backend_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'}  # type: ignore

    async def _backend_health_on_demand_initial(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        probe_request: "models.ApplicationGatewayOnDemandProbe",
        expand: Optional[str] = None,
        **kwargs
    ) -> "models.ApplicationGatewayBackendHealthOnDemand":
        # Sends the raw POST (with a serialized probe body) for the on-demand
        # backend-health operation; begin_backend_health_on_demand polls it.
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayBackendHealthOnDemand"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        content_type = kwargs.pop("content_type", "application/json")

        # Construct URL
        url = self._backend_health_on_demand_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(probe_request, 'ApplicationGatewayOnDemandProbe')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Only a 200 carries a body; a 202 means the operation is still running.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _backend_health_on_demand_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'}  # type: ignore

    async def begin_backend_health_on_demand(
        self,
        resource_group_name: str,
        application_gateway_name: str,
        probe_request: "models.ApplicationGatewayOnDemandProbe",
        expand: Optional[str] = None,
        **kwargs
    ) -> "models.ApplicationGatewayBackendHealthOnDemand":
        """Gets the backend health for given combination of backend pool and http setting of the
        specified application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param probe_request: Request body for on-demand test probe operation.
        :type probe_request: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayOnDemandProbe
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: ApplicationGatewayBackendHealthOnDemand, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayBackendHealthOnDemand
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayBackendHealthOnDemand"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._backend_health_on_demand_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                probe_request=probe_request,
                expand=expand,
                # identity cls: keep the raw pipeline response for the poller
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_backend_health_on_demand.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'}  # type: ignore

    async def list_available_server_variables(
        self,
        **kwargs
    ) -> List[str]:
        """Lists all available server variables.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.list_available_server_variables.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # this endpoint returns a structured Error body on failure
            error = self._deserialize(models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('[str]', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_available_server_variables.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableServerVariables'}  # type: ignore

    async def list_available_request_headers(
        self,
        **kwargs
    ) -> List[str]:
        """Lists all available request headers.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.list_available_request_headers.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # this endpoint returns a structured Error body on failure
            error = self._deserialize(models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('[str]', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_available_request_headers.metadata = {'url':
        '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableRequestHeaders'}  # type: ignore

    async def list_available_response_headers(
        self,
        **kwargs
    ) -> List[str]:
        """Lists all available response headers.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.list_available_response_headers.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # this endpoint returns a structured Error body on failure
            error = self._deserialize(models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('[str]', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_available_response_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableResponseHeaders'}  # type: ignore

    async def list_available_waf_rule_sets(
        self,
        **kwargs
    ) -> "models.ApplicationGatewayAvailableWafRuleSetsResult":
        """Lists all available web application firewall rule sets.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationGatewayAvailableWafRuleSetsResult, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayAvailableWafRuleSetsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayAvailableWafRuleSetsResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.list_available_waf_rule_sets.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ApplicationGatewayAvailableWafRuleSetsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_available_waf_rule_sets.metadata = {'url':
        '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets'}  # type: ignore

    # NOTE(review): the body of this coroutine does not follow its signature in
    # this chunked view — a displaced chunk later in the file appears to carry
    # it; verify against the original generated file.
    async def list_available_ssl_options(
        self,
        **kwargs
    ) -> "models.ApplicationGatewayAvailableSslOptions":
    list_available_ssl_options.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default'}  # type: ignore

    def list_available_ssl_predefined_policies(
        self,
        **kwargs
    ) -> AsyncIterable["models.ApplicationGatewayAvailableSslPredefinedPolicies"]:
        """Lists all SSL predefined policies for configuring Ssl policy.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayAvailableSslPredefinedPolicies or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayAvailableSslPredefinedPolicies]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayAvailableSslPredefinedPolicies"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Build the GET request for the first page (no next_link) or for a
        # continuation page (next_link already contains the query string).
        def prepare_request(next_link=None):
            if not next_link:
                # Construct URL
                url = self.list_available_ssl_predefined_policies.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserialize one page into (next_link, items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationGatewayAvailableSslPredefinedPolicies', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        # Fetch one page and validate the HTTP status.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_available_ssl_predefined_policies.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies'}  # type: ignore

    async def get_ssl_predefined_policy(
        self,
        predefined_policy_name: str,
        **kwargs
    ) -> "models.ApplicationGatewaySslPredefinedPolicy":
        """Gets Ssl predefined policy with the specified policy name.

        :param predefined_policy_name: Name of Ssl predefined policy.
        :type predefined_policy_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationGatewaySslPredefinedPolicy, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewaySslPredefinedPolicy
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewaySslPredefinedPolicy"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.get_ssl_predefined_policy.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'predefinedPolicyName': self._serialize.url("predefined_policy_name", predefined_policy_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ApplicationGatewaySslPredefinedPolicy', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_ssl_predefined_policy.metadata = {'url':
        '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}'}  # type: ignore
        # NOTE(review): this chunk is the displaced body of
        # list_available_ssl_options, whose signature appears earlier in the
        # file in this chunked view — confirm ordering against the original.
        """Lists available Ssl options for configuring Ssl policy.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationGatewayAvailableSslOptions, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.ApplicationGatewayAvailableSslOptions
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ApplicationGatewayAvailableSslOptions"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"

        # Construct URL
        url = self.list_available_ssl_options.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ApplicationGatewayAvailableSslOptions', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
ConnectionLinksFilter.tsx
/* * Copyright 2020, EnMasse authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ import * as React from "react"; import { DataToolbarGroup, DataToolbarFilter, DataToolbarItem, DataToolbarChip, DataToolbarToggleGroup, DataToolbar, DataToolbarContent } from "@patternfly/react-core/dist/js/experimental"; import { Dropdown, DropdownToggle, DropdownItem, InputGroup, Button, ButtonVariant, Badge, SelectOption, SelectOptionObject, Select, SelectVariant } from "@patternfly/react-core"; import { FilterIcon, SearchIcon } from "@patternfly/react-icons"; import { ISortBy } from "@patternfly/react-table"; import useWindowDimensions from "components/common/WindowDimension"; import { SortForMobileView } from "components/common/SortForMobileView"; import { useApolloClient } from "@apollo/react-hooks"; import { IConnectionLinksNameSearchResponse, IConnectionLinksAddressSearchResponse } from "types/ResponseTypes"; import { RETURN_ALL_CONNECTION_LINKS_FOR_NAME_SEARCH, RETURN_ALL_CONNECTION_LINKS_FOR_ADDRESS_SEARCH } from "queries"; import { TypeAheadMessage, MAX_ITEM_TO_DISPLAY_IN_TYPEAHEAD_DROPDOWN, TYPEAHEAD_REQUIRED_LENGTH, NUMBER_OF_RECORDS_TO_DISPLAY_IF_SERVER_HAS_MORE_DATA, FetchPolicy } from "constants/constants"; import { getSelectOptionList, ISelectOption } from "utils"; interface IConnectionLinksFilterProps { filterValue: string; setFilterValue: (value: string) => void; filterNames: any[]; setFilterNames: (value: Array<string>) => void; filterAddresses: any[]; setFilterAddresses: (value: Array<string>) => void; filterRole?: string; setFilterRole: (role: string | undefined) => void; sortValue?: ISortBy; setSortValue: (value: ISortBy) => void; totalLinks: number; addressSpaceName: string; namespace: string; connectionName: string; } export const ConnectionLinksFilter: React.FunctionComponent<IConnectionLinksFilterProps> = ({ filterValue, setFilterValue, filterNames, setFilterNames, filterAddresses, 
setFilterAddresses, filterRole, setFilterRole, sortValue, setSortValue, totalLinks, addressSpaceName, namespace, connectionName }) => { const { width } = useWindowDimensions(); const client = useApolloClient(); const [filterIsExpanded, setFilterIsExpanded] = React.useState(false); const [roleIsExpanded, setRoleIsExpanded] = React.useState(false); const [isSelectNameExpanded, setIsSelectNameExpanded] = React.useState< boolean >(false); const [isSelectAddressExpanded, setIsSelectAddressExpanded] = React.useState< boolean >(false); const [nameSelected, setNameSelected] = React.useState<string>(); const [addressSelected, setAddressSelected] = React.useState<string>(); const [nameInput, setNameInput] = React.useState<string>(""); const [addressInput, setAddressInput] = React.useState<string>(""); const [nameOptions, setNameOptions] = React.useState<Array<ISelectOption>>(); const [addressOptions, setAddressOptions] = React.useState< Array<ISelectOption> >(); const filterMenuItems = [ { key: "filterName", value: "Name" }, { key: "filterAddress", value: "Address" }, { key: "filterRole", value: "Role" } ]; const roleMenuItems = [ { key: "roleSender", value: "Sender" }, { key: "roleReceiver", value: "Receiver" } ]; const sortMenuItems = [ { key: "name", value: "Name", index: 1 }, { key: "address", value: "Address", index: 2 }, { key: "deliveries", value: "Deliveries", index: 3 }, { key: "accepted", value: "Accepted", index: 4 }, { key: "rejected", value: "Rejected", index: 5 }, { key: "released", value: "Released", index: 6 }, { key: "modified", value: "Modified", index: 7 }, { key: "presettled", value: "Presettled", index: 8 }, { key: "undelievered", value: "Undelievered", index: 9 }
if (nameSelected && nameSelected.trim() !== "") if ( filterNames.map(filter => filter.value).indexOf(nameSelected.trim()) < 0 ) { setFilterNames([ ...filterNames, { value: nameSelected.trim(), isExact: true } ]); setNameSelected(undefined); } if (!nameSelected && nameInput && nameInput.trim() !== "") if ( filterNames.map(filter => filter.value).indexOf(nameInput.trim()) < 0 ) setFilterNames([ ...filterNames, { value: nameInput.trim(), isExact: false } ]); } else if (filterValue && filterValue === "Address") { if (addressSelected && addressSelected.trim() !== "") if ( filterAddresses .map(filter => filter.value) .indexOf(addressSelected.trim()) < 0 ) { setFilterAddresses([ ...filterAddresses, { value: addressSelected.trim(), isExact: true } ]); setAddressSelected(undefined); } if (!addressSelected && addressInput && addressInput.trim() !== "") if ( filterAddresses .map(filter => filter.value) .indexOf(addressInput.trim()) < 0 ) setFilterAddresses([ ...filterAddresses, { value: addressInput.trim(), isExact: false } ]); } }; const onFilterSelect = (event: any) => { setFilterValue(event.target.value); setFilterIsExpanded(!filterIsExpanded); }; const onRoleSelect = (event: any) => { setFilterRole(event.target.value); setRoleIsExpanded(!roleIsExpanded); }; const onNameSelectToggle = () => { setIsSelectNameExpanded(!isSelectNameExpanded); }; const onAddressSelectToggle = () => { setIsSelectAddressExpanded(!isSelectAddressExpanded); }; const onChangeNameData = async (value: string) => { setNameOptions(undefined); if (value.trim().length < TYPEAHEAD_REQUIRED_LENGTH) { setNameOptions([]); return; } const response = await client.query<IConnectionLinksNameSearchResponse>({ query: RETURN_ALL_CONNECTION_LINKS_FOR_NAME_SEARCH( connectionName, namespace, value.trim() ), fetchPolicy: FetchPolicy.NETWORK_ONLY }); if ( response && response.data && response.data.connections && response.data.connections.connections && response.data.connections.connections.length > 0 && 
response.data.connections.connections[0].links && response.data.connections.connections[0].links.links && response.data.connections.connections[0].links.links.length > 0 ) { const obtainedList = response.data.connections.connections[0].links.links.map( (link: any) => { return link.metadata.name; } ); //get list of unique records to display in the select dropdown based on total records and 100 fetched objects const uniqueList = getSelectOptionList( obtainedList, response.data.connections.connections[0].links.total ); if (uniqueList.length > 0) setNameOptions(uniqueList); } }; const onNameSelectFilterChange = (e: React.ChangeEvent<HTMLInputElement>) => { setNameInput(e.target.value); onChangeNameData(e.target.value); const options: React.ReactElement[] = nameOptions ? nameOptions.map((option, index) => ( <SelectOption key={index} value={option} /> )) : []; return options; }; const onChangeAddressData = async (value: string) => { setAddressOptions(undefined); if (value.trim().length < TYPEAHEAD_REQUIRED_LENGTH) { setAddressOptions([]); return; } const response = await client.query<IConnectionLinksAddressSearchResponse>({ query: RETURN_ALL_CONNECTION_LINKS_FOR_ADDRESS_SEARCH( connectionName, namespace, value.trim() ), fetchPolicy: FetchPolicy.NETWORK_ONLY }); if ( response && response.data && response.data.connections && response.data.connections.connections && response.data.connections.connections.length > 0 && response.data.connections.connections[0].links && response.data.connections.connections[0].links.links && response.data.connections.connections[0].links.links.length > 0 ) { const obtainedList = response.data.connections.connections[0].links.links.map( (link: any) => { return link.spec.address; } ); //get list of unique records to display in the select dropdown based on total records and 100 fetched objects const uniqueList = getSelectOptionList( obtainedList, response.data.connections.connections[0].links.total ); if (uniqueList.length > 0) 
setAddressOptions(uniqueList); } }; const onAddressSelectFilterChange = ( e: React.ChangeEvent<HTMLInputElement> ) => { setAddressInput(e.target.value); onChangeAddressData(e.target.value); const options: React.ReactElement[] = addressOptions ? addressOptions.map((option, index) => ( <SelectOption key={index} value={option} /> )) : []; return options; }; const onNameSelect = (event: any, selection: string | SelectOptionObject) => { setNameSelected(selection.toString()); setIsSelectNameExpanded(false); }; const onAddressSelect = ( event: any, selection: string | SelectOptionObject ) => { setAddressSelected(selection.toString()); setIsSelectAddressExpanded(false); }; const onDelete = ( type: string | DataToolbarChip, id: string | DataToolbarChip ) => { switch (type) { case "Name": if (filterNames && id) { let index = filterNames .map(filter => filter.value) .indexOf(id.toString()); if (index >= 0) filterNames.splice(index, 1); setFilterNames([...filterNames]); } break; case "Address": if (filterAddresses && id) { let index = filterAddresses .map(filter => filter.value) .indexOf(id.toString()); if (index >= 0) filterAddresses.splice(index, 1); setFilterAddresses([...filterAddresses]); } break; case "Role": setFilterRole(undefined); break; } }; const checkIsFilterApplied = () => { if ( (filterNames && filterNames.length > 0) || (filterAddresses && filterAddresses.length > 0) || (filterRole && filterRole.trim() !== "") ) { return true; } return false; }; const onDeleteAll = () => { setFilterValue("Name"); setFilterNames([]); setFilterAddresses([]); setFilterRole(undefined); }; const toggleGroupItems = ( <> <DataToolbarGroup variant="filter-group"> <DataToolbarFilter categoryName="Filter"> <Dropdown id="cl-filter-dropdown" position="left" onSelect={onFilterSelect} isOpen={filterIsExpanded} toggle={ <DropdownToggle onToggle={setFilterIsExpanded}> <FilterIcon /> &nbsp; {filterValue} </DropdownToggle> } dropdownItems={filterMenuItems.map(option => ( <DropdownItem 
id={`cl-filter-dropdown-item${option.key}`} key={option.key} value={option.value} itemID={option.key} component={"button"} > {option.value} </DropdownItem> ))} /> </DataToolbarFilter> <> <DataToolbarItem> <DataToolbarFilter chips={filterNames.map(filter => filter.value)} deleteChip={onDelete} categoryName="Name" > {filterValue && filterValue === "Name" && ( <InputGroup> <Select id="cl-filter-select-name" variant={SelectVariant.typeahead} aria-label="Select a Name" onToggle={onNameSelectToggle} onSelect={onNameSelect} onClear={() => { setNameSelected(undefined); setIsSelectNameExpanded(false); }} maxHeight="200px" selections={nameSelected} onFilter={onNameSelectFilterChange} isExpanded={isSelectNameExpanded} ariaLabelledBy={"typeahead-select-id"} placeholderText="Select name" isDisabled={false} isCreatable={false} > {nameOptions && nameOptions.length > 0 ? ( nameOptions.map((option, index) => ( <SelectOption key={index} value={option.value} isDisabled={option.isDisabled} /> )) ) : nameInput.trim().length < TYPEAHEAD_REQUIRED_LENGTH ? 
( <SelectOption key={"invalid-input-length"} value={TypeAheadMessage.MORE_CHAR_REQUIRED} disabled={true} /> ) : ( <SelectOption key={"no-results-found"} value={TypeAheadMessage.NO_RESULT_FOUND} disabled={true} /> )} {/* {} */} </Select> <Button id="cl-filter-search-name" variant={ButtonVariant.control} aria-label="search button for search name" onClick={onClickSearchIcon} > <SearchIcon /> </Button> </InputGroup> )} </DataToolbarFilter> </DataToolbarItem> <DataToolbarItem> <DataToolbarFilter chips={filterAddresses.map(filter => filter.value)} deleteChip={onDelete} categoryName="Address" > {filterValue && filterValue === "Address" && ( <InputGroup> <Select id="cl-filter-select-address" variant={SelectVariant.typeahead} aria-label="Select a Address" onToggle={onAddressSelectToggle} onSelect={onAddressSelect} onClear={() => { setAddressSelected(undefined); setIsSelectAddressExpanded(false); }} maxHeight="200px" selections={addressSelected} onFilter={onAddressSelectFilterChange} isExpanded={isSelectAddressExpanded} ariaLabelledBy={"typeahead-select-id"} placeholderText="Select Address" isDisabled={false} isCreatable={false} > {addressOptions && addressOptions.length > 0 ? ( addressOptions.map((option, index) => ( <SelectOption key={index} value={option.value} isDisabled={option.isDisabled} /> )) ) : addressInput.trim().length < TYPEAHEAD_REQUIRED_LENGTH ? ( <SelectOption key={"invalid-input-length"} value={TypeAheadMessage.MORE_CHAR_REQUIRED} disabled={true} /> ) : ( <SelectOption key={"no-results-found"} value={TypeAheadMessage.NO_RESULT_FOUND} disabled={true} /> )} {/* {} */} </Select> <Button id="cl-filter-search-address" variant={ButtonVariant.control} aria-label="search button for search address" onClick={onClickSearchIcon} > <SearchIcon /> </Button> </InputGroup> )} </DataToolbarFilter> </DataToolbarItem> <DataToolbarItem> <DataToolbarFilter chips={filterRole ? 
[filterRole] : []} deleteChip={onDelete} categoryName="Role" > {filterValue === "Role" && ( <Dropdown id="cl-filter-dropdown-role" position="left" onSelect={onRoleSelect} isOpen={roleIsExpanded} toggle={ <DropdownToggle onToggle={setRoleIsExpanded}> {filterRole || "Select Role"} </DropdownToggle> } dropdownItems={roleMenuItems.map(option => ( <DropdownItem id={`cl-filter-dropdown-role${option.key}`} key={option.key} value={option.value} itemID={option.key} component={"button"} > {option.value} </DropdownItem> ))} /> )} </DataToolbarFilter> </DataToolbarItem> </> </DataToolbarGroup> </> ); return ( <DataToolbar id="data-toolbar-with-filter" className="pf-m-toggle-group-container" collapseListedFiltersBreakpoint="xl" clearAllFilters={onDeleteAll} > <DataToolbarContent> <DataToolbarToggleGroup toggleIcon={ <> <FilterIcon /> {checkIsFilterApplied() && ( <Badge key={1} isRead> {totalLinks} </Badge> )} </> } breakpoint="xl" > {toggleGroupItems} </DataToolbarToggleGroup> {width < 769 && ( <SortForMobileView sortMenu={sortMenuItems} sortValue={sortValue} setSortValue={setSortValue} /> )} </DataToolbarContent> </DataToolbar> ); };
]; const onClickSearchIcon = (event: any) => { if (filterValue && filterValue === "Name") {
type_system_node.rs
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use interner::StringKey; use std::fmt; #[derive(PartialEq, Debug, Ord, PartialOrd, Eq, Clone)] pub enum Type { Named(StringKey), List(Box<Type>), NonNull(Box<Type>), } #[derive(PartialEq, Debug)] pub enum TypeSystemDefinition { SchemaDefinition { directives: Vec<Directive>, operation_types: Vec<OperationTypeDefinition>, }, ObjectTypeExtension { name: StringKey, interfaces: Vec<StringKey>, fields: Vec<FieldDefinition>, directives: Vec<Directive>, }, ObjectTypeDefinition { name: StringKey, interfaces: Vec<StringKey>, fields: Vec<FieldDefinition>, directives: Vec<Directive>, }, InterfaceTypeDefinition { name: StringKey, directives: Vec<Directive>, fields: Vec<FieldDefinition>, }, InterfaceTypeExtension { name: StringKey, directives: Vec<Directive>, fields: Vec<FieldDefinition>, }, UnionTypeDefinition { name: StringKey, directives: Vec<Directive>, members: Vec<StringKey>, }, DirectiveDefinition { name: StringKey, arguments: Vec<InputValueDefinition>, repeatable: bool, locations: Vec<DirectiveLocation>, }, InputObjectTypeDefinition { name: StringKey, directives: Vec<Directive>, fields: Vec<InputValueDefinition>, }, EnumTypeDefinition { name: StringKey, directives: Vec<Directive>, values: Vec<EnumValueDefinition>, }, ScalarTypeDefinition { name: StringKey, directives: Vec<Directive>, }, } #[derive(PartialEq, Debug)] pub struct OperationTypeDefinition { pub operation: OperationType, pub type_: StringKey, } #[derive(PartialEq, Debug)] pub struct EnumValueDefinition { pub name: StringKey, pub directives: Vec<Directive>, } #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)] pub enum DirectiveLocation { Query, Mutation, Subscription, Field, FragmentDefinition, FragmentSpread, InlineFragment, Schema, Scalar, Object, FieldDefinition, ArgumentDefinition, Interface, Union, Enum, EnumValue, 
InputObject, InputFieldDefinition, VariableDefinition, } impl fmt::Display for DirectiveLocation { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { DirectiveLocation::Query => f.write_fmt(format_args!("QUERY")), DirectiveLocation::Mutation => f.write_fmt(format_args!("MUTATION")), DirectiveLocation::Subscription => f.write_fmt(format_args!("SUBSCRIPTION")), DirectiveLocation::Field => f.write_fmt(format_args!("FIELD")), DirectiveLocation::FragmentDefinition => { f.write_fmt(format_args!("FRAGMENT_DEFINITION")) } DirectiveLocation::FragmentSpread => f.write_fmt(format_args!("FRAGMENT_SPREAD")), DirectiveLocation::InlineFragment => f.write_fmt(format_args!("INLINE_FRAGMENT")), DirectiveLocation::Schema => f.write_fmt(format_args!("SCHEMA")), DirectiveLocation::Scalar => f.write_fmt(format_args!("SCALAR")), DirectiveLocation::Object => f.write_fmt(format_args!("OBJECT")), DirectiveLocation::FieldDefinition => f.write_fmt(format_args!("FIELD_DEFINITION")), DirectiveLocation::ArgumentDefinition => { f.write_fmt(format_args!("ARGUMENT_DEFINITION")) } DirectiveLocation::Interface => f.write_fmt(format_args!("INTERFACE")), DirectiveLocation::Union => f.write_fmt(format_args!("UNION")), DirectiveLocation::Enum => f.write_fmt(format_args!("ENUM")), DirectiveLocation::EnumValue => f.write_fmt(format_args!("ENUM_VALUE")), DirectiveLocation::InputObject => f.write_fmt(format_args!("INPUT_OBJECT")), DirectiveLocation::InputFieldDefinition => { f.write_fmt(format_args!("INPUT_FIELD_DEFINITION")) } DirectiveLocation::VariableDefinition => { f.write_fmt(format_args!("VARIABLE_DEFINITION")) } } } } #[derive(PartialEq, Debug, Copy, Clone)] pub enum OperationType { Query, Mutation, Subscription, } #[derive(PartialEq, Debug)] pub struct
{ pub name: StringKey, pub type_: Type, pub default_value: Option<Value>, pub directives: Vec<Directive>, } #[derive(PartialEq, Debug)] pub struct FieldDefinition { pub name: StringKey, pub type_: Type, pub arguments: Vec<InputValueDefinition>, pub directives: Vec<Directive>, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum Value { Null, String(String), Boolean(bool), Enum(StringKey), Int(String), Float(String), List(ListValue), Object(ObjectValue), } impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Value::String(value) => f.write_fmt(format_args!("\"{}\"", value)), Value::Int(value) | Value::Float(value) => f.write_fmt(format_args!("{}", value)), Value::Boolean(value) => f.write_fmt(format_args!("{}", value)), Value::Enum(value) => f.write_fmt(format_args!("{}", value.lookup())), Value::List(value) => f.write_fmt(format_args!("{}", value)), Value::Object(value) => f.write_fmt(format_args!("{}", value)), _null => f.write_fmt(format_args!("UNKNOWN")), } } } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct ListValue { pub values: Vec<Value>, } impl fmt::Display for ListValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "[")?; let mut first = true; for item in &self.values { if first { first = false; } else { write!(f, ", ")?; } write!(f, "{}", item)?; } write!(f, "]") } } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct ObjectValue { pub fields: Vec<ObjectField>, } impl fmt::Display for ObjectValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{{")?; let mut first = true; for field in &self.fields { if first { first = false; } else { write!(f, ", ")?; } write!(f, "{}: {}", field.name, field.value)?; } write!(f, "}}") } } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct ObjectField { pub name: StringKey, pub value: Value, } #[derive(PartialEq, Debug)] pub struct Argument 
{ pub name: StringKey, pub value: Value, } #[derive(PartialEq, Debug)] pub struct Directive { pub name: StringKey, pub arguments: Vec<Argument>, }
InputValueDefinition
auth.guard.ts
import { Injectable } from '@angular/core'; import { Router, CanActivate, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router'; import * as moment from 'moment'; @Injectable({ providedIn: 'root' }) export class AuthGuard implements CanActivate { constructor( private router: Router) { } canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) { if (this.isLoggedIn()) { return true; } else { this.router.navigate(['/user-login']); return false; }
return moment().isBefore(day); } }
} isLoggedIn() { const day = moment.unix(Number(localStorage.getItem('exp')));
helpers.go
package filters import ( "context" "github.com/Peripli/service-manager/pkg/query" "github.com/Peripli/service-manager/pkg/types" "github.com/Peripli/service-manager/storage" ) func brokersCriteria(ctx context.Context, repository storage.Repository, servicesQuery *query.Criterion) (*query.Criterion, error) { objectList, err := repository.List(ctx, types.ServiceOfferingType, *servicesQuery) if err != nil { return nil, err } services := objectList.(*types.ServiceOfferings) if services.Len() < 1 { return nil, nil } brokerIDs := make([]string, 0, services.Len()) for _, p := range services.ServiceOfferings { brokerIDs = append(brokerIDs, p.BrokerID) } c := query.ByField(query.InOperator, "id", brokerIDs...) return &c, nil } func
(ctx context.Context, repository storage.Repository, planQuery *query.Criterion) (*query.Criterion, error) { objectList, err := repository.List(ctx, types.ServicePlanType, *planQuery) if err != nil { return nil, err } plans := objectList.(*types.ServicePlans) if plans.Len() < 1 { return nil, nil } serviceIDs := make([]string, 0, plans.Len()) for _, p := range plans.ServicePlans { serviceIDs = append(serviceIDs, p.ServiceOfferingID) } c := query.ByField(query.InOperator, "id", serviceIDs...) return &c, nil } func plansCriteria(ctx context.Context, repository storage.Repository, platformID string) (*query.Criterion, error) { objectList, err := repository.List(ctx, types.VisibilityType, query.ByField(query.EqualsOrNilOperator, "platform_id", platformID)) if err != nil { return nil, err } visibilityList := objectList.(*types.Visibilities) if visibilityList.Len() < 1 { return nil, nil } planIDs := make([]string, 0, visibilityList.Len()) for _, vis := range visibilityList.Visibilities { planIDs = append(planIDs, vis.ServicePlanID) } c := query.ByField(query.InOperator, "id", planIDs...) return &c, nil }
servicesCriteria
attribs.go
// Copyright 2020 Kuei-chun Chen. All rights reserved. package analytics
"math" "strings" "time" "github.com/simagix/keyhole/ftdc" ) // Attribs stores attribs map type Attribs struct { attribsMap *map[string][]uint64 } // NewAttribs returns Attribs structure func NewAttribs(attribsMap *map[string][]uint64) *Attribs { return &Attribs{attribsMap: attribsMap} } // GetServerStatusDataPoints returns server status func (attr *Attribs) GetServerStatusDataPoints(i int) ServerStatusDoc { ss := ServerStatusDoc{} ss.LocalTime = time.Unix(0, int64(time.Millisecond)*int64(attr.get("serverStatus/localTime", i))) ss.Mem.Resident = attr.get("serverStatus/mem/resident", i) ss.Mem.Virtual = attr.get("serverStatus/mem/virtual", i) ss.Network.BytesIn = attr.get("serverStatus/network/bytesIn", i) ss.Network.BytesOut = attr.get("serverStatus/network/bytesOut", i) ss.Network.NumRequests = attr.get("serverStatus/network/numRequests", i) ss.Network.PhysicalBytesIn = attr.get("serverStatus/network/physicalBytesIn", i) ss.Network.PhysicalBytesOut = attr.get("serverStatus/network/physicalBytesOut", i) ss.Connections.Current = attr.get("serverStatus/connections/current", i) ss.Connections.TotalCreated = attr.get("serverStatus/connections/totalCreated", i) ss.Connections.Available = attr.get("serverStatus/connections/available", i) ss.Connections.Active = attr.get("serverStatus/connections/active", i) ss.ExtraInfo.PageFaults = attr.get("serverStatus/extra_info/page_faults", i) ss.GlobalLock.ActiveClients.Readers = attr.get("serverStatus/globalLock/activeClients/readers", i) ss.GlobalLock.ActiveClients.Writers = attr.get("serverStatus/globalLock/activeClients/writers", i) ss.GlobalLock.CurrentQueue.Readers = attr.get("serverStatus/globalLock/currentQueue/readers", i) ss.GlobalLock.CurrentQueue.Writers = attr.get("serverStatus/globalLock/currentQueue/writers", i) ss.Metrics.QueryExecutor.Scanned = attr.get("serverStatus/metrics/queryExecutor/scanned", i) ss.Metrics.QueryExecutor.ScannedObjects = attr.get("serverStatus/metrics/queryExecutor/scannedObjects", i) 
ss.Metrics.Operation.ScanAndOrder = attr.get("serverStatus/metrics/operation/scanAndOrder", i) ss.OpLatencies.Commands.Latency = attr.get("serverStatus/opLatencies/commands/latency", i) ss.OpLatencies.Commands.Ops = attr.get("serverStatus/opLatencies/commands/ops", i) ss.OpLatencies.Reads.Latency = attr.get("serverStatus/opLatencies/reads/latency", i) ss.OpLatencies.Reads.Ops = attr.get("serverStatus/opLatencies/reads/ops", i) ss.OpLatencies.Writes.Latency = attr.get("serverStatus/opLatencies/writes/latency", i) ss.OpLatencies.Writes.Ops = attr.get("serverStatus/opLatencies/writes/ops", i) ss.OpCounters.Command = attr.get("serverStatus/opcounters/command", i) ss.OpCounters.Delete = attr.get("serverStatus/opcounters/delete", i) ss.OpCounters.Getmore = attr.get("serverStatus/opcounters/getmore", i) ss.OpCounters.Insert = attr.get("serverStatus/opcounters/insert", i) ss.OpCounters.Query = attr.get("serverStatus/opcounters/query", i) ss.OpCounters.Update = attr.get("serverStatus/opcounters/update", i) ss.Uptime = attr.get("serverStatus/uptime", i) ss.WiredTiger.BlockManager.BytesRead = attr.get("serverStatus/wiredTiger/block-manager/bytes read", i) ss.WiredTiger.BlockManager.BytesWritten = attr.get("serverStatus/wiredTiger/block-manager/bytes written", i) ss.WiredTiger.BlockManager.BytesWrittenCheckPoint = attr.get("serverStatus/wiredTiger/block-manager/bytes written for checkpoint", i) ss.WiredTiger.Cache.CurrentlyInCache = attr.get("serverStatus/wiredTiger/cache/bytes currently in the cache", i) ss.WiredTiger.Cache.MaxBytesConfigured = attr.get("serverStatus/wiredTiger/cache/maximum bytes configured", i) ss.WiredTiger.Cache.ModifiedPagesEvicted = attr.get("serverStatus/wiredTiger/cache/modified pages evicted", i) ss.WiredTiger.Cache.BytesReadIntoCache = attr.get("serverStatus/wiredTiger/cache/bytes read into cache", i) ss.WiredTiger.Cache.BytesWrittenFromCache = attr.get("serverStatus/wiredTiger/cache/bytes written from cache", i) 
ss.WiredTiger.Cache.TrackedDirtyBytes = attr.get("serverStatus/wiredTiger/cache/tracked dirty bytes in the cache", i) ss.WiredTiger.Cache.UnmodifiedPagesEvicted = attr.get("serverStatus/wiredTiger/cache/unmodified pages evicted", i) ss.WiredTiger.DataHandle.Active = attr.get("serverStatus/wiredTiger/data-handle/connection data handles currently active", i) ss.WiredTiger.ConcurrentTransactions.Read.Available = attr.get("serverStatus/wiredTiger/concurrentTransactions/read/available", i) ss.WiredTiger.ConcurrentTransactions.Write.Available = attr.get("serverStatus/wiredTiger/concurrentTransactions/write/available", i) return ss } // GetSystemMetricsDataPoints returns system metrics func (attr *Attribs) GetSystemMetricsDataPoints(i int) SystemMetricsDoc { attribs := *attr.attribsMap sm := SystemMetricsDoc{Disks: map[string]DiskMetrics{}} sm.Start = time.Unix(0, int64(time.Millisecond)*int64(attr.get("serverStatus/localTime", i))) sm.CPU.IdleMS = attr.get("systemMetrics/cpu/idle_ms", i) sm.CPU.UserMS = attr.get("systemMetrics/cpu/user_ms", i) sm.CPU.IOWaitMS = attr.get("systemMetrics/cpu/iowait_ms", i) sm.CPU.NiceMS = attr.get("systemMetrics/cpu/nice_ms", i) sm.CPU.SoftirqMS = attr.get("systemMetrics/cpu/softirq_ms", i) sm.CPU.StealMS = attr.get("systemMetrics/cpu/steal_ms", i) sm.CPU.SystemMS = attr.get("systemMetrics/cpu/system_ms", i) for key := range attribs { if strings.Index(key, "systemMetrics/disks/") != 0 { continue } tokens := strings.Split(key, ftdc.PathSeparator) disk := tokens[2] stats := tokens[3] if _, ok := sm.Disks[disk]; !ok { sm.Disks[disk] = DiskMetrics{} } m := sm.Disks[disk] switch stats { case "read_time_ms": m.ReadTimeMS = attr.get(key, i) case "write_time_ms": m.WriteTimeMS = attr.get(key, i) case "io_queued_ms": m.IOQueuedMS = attr.get(key, i) case "io_time_ms": m.IOTimeMS = attr.get(key, i) case "reads": m.Reads = attr.get(key, i) case "writes": m.Writes = attr.get(key, i) case "io_in_progress": m.IOInProgress = attr.get(key, i) } 
sm.Disks[disk] = m } return sm } func (attr *Attribs) get(key string, i int) uint64 { arr := (*attr.attribsMap)[key] if i < len(arr) && math.IsNaN(float64(arr[i])) == false { return arr[i] } return 0 }
import (
assetsListTest.js
const {both} = require('../Utils/Ethereum'); const { makeComptroller, makeCToken } = require('../Utils/Compound');
describe('assetListTest', () => { let root, customer, accounts; let comptroller; let allTokens, OMG, ZRX, BAT, REP, DAI, SKT; beforeEach(async () => { [root, customer, ...accounts] = saddle.accounts; comptroller = await makeComptroller({maxAssets: 10}); allTokens = [OMG, ZRX, BAT, REP, DAI, SKT] = await Promise.all( ['OMG', 'ZRX', 'BAT', 'REP', 'DAI', 'sketch'] .map(async (name) => makeCToken({comptroller, name, symbol: name, supportMarket: name != 'sketch', underlyingPrice: 0.5})) ); }); async function checkMarkets(expectedTokens) { for (let token of allTokens) { const isExpected = expectedTokens.some(e => e.symbol == token.symbol); expect(await call(comptroller, 'checkMembership', [customer, token._address])).toEqual(isExpected); } } async function enterAndCheckMarkets(enterTokens, expectedTokens, expectedErrors = null) { const {reply, receipt} = await both(comptroller, 'enterMarkets', [enterTokens.map(t => t._address)], {from: customer}); const assetsIn = await call(comptroller, 'getAssetsIn', [customer]); expectedErrors = expectedErrors || enterTokens.map(_ => 'NO_ERROR'); reply.forEach((tokenReply, i) => { expect(tokenReply).toHaveTrollError(expectedErrors[i]); }); expect(receipt).toSucceed(); expect(assetsIn).toEqual(expectedTokens.map(t => t._address)); await checkMarkets(expectedTokens); return receipt; }; async function exitAndCheckMarkets(exitToken, expectedTokens, expectedError = 'NO_ERROR') { const {reply, receipt} = await both(comptroller, 'exitMarket', [exitToken._address], {from: customer}); const assetsIn = await call(comptroller, 'getAssetsIn', [customer]); expect(reply).toHaveTrollError(expectedError); //assert.trollSuccess(receipt); XXX enterMarkets cannot fail, but exitMarket can - kind of confusing expect(assetsIn).toEqual(expectedTokens.map(t => t._address)); await checkMarkets(expectedTokens); return receipt; }; describe('enterMarkets', () => { it("properly emits events", async () => { const result1 = await enterAndCheckMarkets([OMG], [OMG]); 
const result2 = await enterAndCheckMarkets([OMG], [OMG]); expect(result1).toHaveLog('MarketEntered', { cToken: OMG._address, account: customer }); expect(result2.events).toEqual({}); }); it("adds to the asset list only once", async () => { await enterAndCheckMarkets([OMG], [OMG]); await enterAndCheckMarkets([OMG], [OMG]); await enterAndCheckMarkets([ZRX, BAT, OMG], [OMG, ZRX, BAT]); await enterAndCheckMarkets([ZRX, OMG], [OMG, ZRX, BAT]); await enterAndCheckMarkets([ZRX], [OMG, ZRX, BAT]); await enterAndCheckMarkets([OMG], [OMG, ZRX, BAT]); await enterAndCheckMarkets([ZRX], [OMG, ZRX, BAT]); await enterAndCheckMarkets([BAT], [OMG, ZRX, BAT]); }); it("the market must be listed for add to succeed", async () => { await enterAndCheckMarkets([SKT], [], ['MARKET_NOT_LISTED']); await send(comptroller, '_supportMarket', [SKT._address]); await enterAndCheckMarkets([SKT], [SKT]); }); it("returns a list of codes mapping to user's ultimate membership in given addresses", async () => { await enterAndCheckMarkets([OMG, ZRX, BAT], [OMG, ZRX, BAT], ['NO_ERROR', 'NO_ERROR', 'NO_ERROR'], "success if can enter markets"); await enterAndCheckMarkets([OMG, SKT], [OMG, ZRX, BAT], ['NO_ERROR', 'MARKET_NOT_LISTED'], "error for unlisted markets"); }); }); describe('exitMarket', () => { it("doesn't let you exit if you have a borrow balance", async () => { await enterAndCheckMarkets([OMG], [OMG]); await send(OMG, 'harnessSetAccountBorrows', [customer, 1, 1]); await exitAndCheckMarkets(OMG, [OMG], 'NONZERO_BORROW_BALANCE'); }); it("rejects unless redeem allowed", async () => { await enterAndCheckMarkets([OMG, BAT], [OMG, BAT]); await send(BAT, 'harnessSetAccountBorrows', [customer, 1, 1]); // BAT has a negative balance and there's no supply, thus account should be underwater await exitAndCheckMarkets(OMG, [OMG, BAT], 'REJECTION'); }); it("accepts when you're not in the market already", async () => { await enterAndCheckMarkets([OMG, BAT], [OMG, BAT]); // Not in ZRX, should exit fine await 
exitAndCheckMarkets(ZRX, [OMG, BAT], 'NO_ERROR'); }); it("properly removes when there's only one asset", async () => { await enterAndCheckMarkets([OMG], [OMG]); await exitAndCheckMarkets(OMG, [], 'NO_ERROR'); }); it("properly removes when there's only two assets, removing the first", async () => { await enterAndCheckMarkets([OMG, BAT], [OMG, BAT]); await exitAndCheckMarkets(OMG, [BAT], 'NO_ERROR'); }); it("properly removes when there's only two assets, removing the second", async () => { await enterAndCheckMarkets([OMG, BAT], [OMG, BAT]); await exitAndCheckMarkets(BAT, [OMG], 'NO_ERROR'); }); it("properly removes when there's only three assets, removing the first", async () => { await enterAndCheckMarkets([OMG, BAT, ZRX], [OMG, BAT, ZRX]); await exitAndCheckMarkets(OMG, [ZRX, BAT], 'NO_ERROR'); }); it("properly removes when there's only three assets, removing the second", async () => { await enterAndCheckMarkets([OMG, BAT, ZRX], [OMG, BAT, ZRX]); await exitAndCheckMarkets(BAT, [OMG, ZRX], 'NO_ERROR'); }); it("properly removes when there's only three assets, removing the third", async () => { await enterAndCheckMarkets([OMG, BAT, ZRX], [OMG, BAT, ZRX]); await exitAndCheckMarkets(ZRX, [OMG, BAT], 'NO_ERROR'); }); }); describe('entering from borrowAllowed', () => { it("enters when called by a ctoken", async () => { await send(BAT, 'harnessCallBorrowAllowed', [1], {from: customer}); const assetsIn = await call(comptroller, 'getAssetsIn', [customer]); expect([BAT._address]).toEqual(assetsIn); await checkMarkets([BAT]); }); it("reverts when called by not a ctoken", async () => { await expect( send(comptroller, 'borrowAllowed', [BAT._address, customer, 1], {from: customer}) ).rejects.toRevert('revert sender must be cToken'); const assetsIn = await call(comptroller, 'getAssetsIn', [customer]); expect([]).toEqual(assetsIn); await checkMarkets([]); }); it("adds to the asset list only once", async () => { await send(BAT, 'harnessCallBorrowAllowed', [1], {from: customer}); 
await enterAndCheckMarkets([BAT], [BAT]); await send(BAT, 'harnessCallBorrowAllowed', [1], {from: customer}); const assetsIn = await call(comptroller, 'getAssetsIn', [customer]); expect([BAT._address]).toEqual(assetsIn); }); }); });
_tools.py
# Authors: Pierre Ablin <[email protected]> # Alexandre Gramfort <[email protected]> # Jean-Francois Cardoso <[email protected]> # # License: BSD (3-clause) import numbers import numpy as np from scipy.linalg import expm def gradient(Y, psiY): ''' Compute the gradient for the current signals ''' _, T = Y.shape return np.inner(psiY, Y) / float(T) def proj_hessian_approx(Y, psidY_mean, G): ''' Computes the projected Hessian approximation. ''' N, _ = Y.shape diag = psidY_mean[:, None] * np.ones(N)[None, :] off_diag = np.diag(G) return 0.5 * (diag + diag.T - off_diag[:, None] - off_diag[None, :]) def regularize_hessian(h, l): ''' Clips the eigenvalues of h to l ''' h[h < l] = l return h def solve_hessian(G, h): ''' Returns the inverse Hessian times G ''' return G / h def loss(Y, signs, density): ''' Returns the loss function ''' output = 0. _, T = Y.shape for y, s in zip(Y, signs): output += s * np.mean(density.log_lik(y)) return output def
(G, h, s_list, y_list, r_list): q = G.copy() a_list = [] for s, y, r in zip(reversed(s_list), reversed(y_list), reversed(r_list)): alpha = r * np.sum(s * q) a_list.append(alpha) q -= alpha * y z = solve_hessian(q, h) for s, y, r, alpha in zip(s_list, y_list, r_list, reversed(a_list)): beta = r * np.sum(y * z) z += (alpha - beta) * s return -z def line_search(Y, signs, density, direction, current_loss, ls_tries): ''' Performs a backtracking line search, starting from Y and W, in the direction direction. ''' alpha = 1. if current_loss is None: current_loss = loss(Y, signs, density) for _ in range(ls_tries): Y_new = np.dot(expm(alpha * direction), Y) new_loss = loss(Y_new, signs, density) if new_loss < current_loss: return True, Y_new, new_loss, alpha alpha /= 2. else: return False, Y_new, new_loss, alpha def permute(A): '''Get a permutation to diagonalize and scale a matrix Parameters ---------- A : ndarray, shape (n_features, n_features) A matrix close from a permutation and scale matrix. Returns ------- A : ndarray, shape (n_features, n_features) A permuted matrix. ''' A = A.copy() n = A.shape[0] idx = np.arange(n) done = False while not done: done = True for i in range(n): for j in range(i): if A[i, i] ** 2 + A[j, j] ** 2 < A[i, j] ** 2 + A[j, i] ** 2: A[(i, j), :] = A[(j, i), :] idx[i], idx[j] = idx[j], idx[i] done = False A /= np.diag(A) order_sort = np.argsort(np.sum(np.abs(A), axis=0)) A = A[order_sort, :] A = A[:, order_sort] return A def check_random_state(seed): """Turn seed into a np.random.RandomState instance Parameters ---------- seed : None | int | instance of RandomState If seed is None, return the RandomState singleton used by np.random. If seed is an int, return a new RandomState instance seeded with seed. If seed is already a RandomState instance, return it. Otherwise raise ValueError. 
""" if seed is None or seed is np.random: return np.random.mtrand._rand if isinstance(seed, (numbers.Integral, np.integer)): return np.random.RandomState(seed) if isinstance(seed, np.random.RandomState): return seed raise ValueError('%r cannot be used to seed a numpy.random.RandomState' ' instance' % seed) def _sym_decorrelation(W): """ Symmetric decorrelation i.e. W <- (W * W.T) ^{-1/2} * W """ s, u = np.linalg.eigh(np.dot(W, W.T)) return np.dot(np.dot(u * (1. / np.sqrt(s)), u.T), W) def _ica_par(X, fun, max_iter, w_init, verbose): """Parallel FastICA. Used internally by FastICA --main loop """ if verbose: print('Running %d iterations of FastICA...' % max_iter) W = _sym_decorrelation(w_init) del w_init p_ = float(X.shape[1]) for ii in range(max_iter): gwtx, g_wtx = fun.score_and_der(np.dot(W, X)) g_wtx = g_wtx.mean(axis=1) W = _sym_decorrelation(np.dot(gwtx, X.T) / p_ - g_wtx[:, np.newaxis] * W) del gwtx, g_wtx if verbose: print('Running Picard...') return W
l_bfgs_direction
ccbc_nl.ts
<TS language="nl" version="2.1"> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Klik met de rechtermuisknop om het adres of label te wijzigen</translation> </message> <message> <source>Create a new address</source> <translation>Maak een nieuw adres aan</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Nieuw</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopieer het geselecteerde adres naar het klembord</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Kopiëren</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Verwijder het geselecteerde adres uit de lijst</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Verwijderen</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Exporteer de data in de huidige tab naar een bestand</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Exporteren</translation> </message> <message> <source>C&amp;lose</source> <translation>&amp;Sluiten</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Kies het adres om naar te versturen</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Kies het adres om op te ontvangen</translation> </message> <message> <source>C&amp;hoose</source> <translation>K&amp;iezen</translation> </message> <message> <source>Sending addresses</source> <translation>Verzendadressen</translation> </message> <message> <source>Receiving addresses</source> <translation>Ontvangstadressen</translation> </message> <message> <source>These are your Ccbc addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation>Dit zijn uw Ccbcadressen om betalingen mee te verzenden. Controleer altijd zowel het bedrag als het ontvangstadres voor het verzenden.</translation> </message> <message> <source>These are your Ccbc addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Dit zijn uw Ccbcadressen waarop betalingen kunnen worden ontvangen. Het wordt aangeraden om een nieuw ontvangstadres voor elke transactie te gebruiken.</translation> </message> <message> <source>&amp;Copy Address</source> <translation>&amp;Kopiëer Adres</translation> </message> <message> <source>Copy &amp;Label</source> <translation>Kopiëer &amp;Label</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Bewerken</translation> </message> <message> <source>Export Address List</source> <translation>Exporteer Adreslijst</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Kommagescheiden bestand (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>Export Mislukt</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Een fout is opgetreden tijdens het opslaan van deze adreslijst naar %1. 
Probeer het nogmaals.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Label</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>(no label)</source> <translation>(geen label)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Wachtwoorddialoog</translation> </message> <message> <source>Enter passphrase</source> <translation>Voer wachtwoord in</translation> </message> <message> <source>New passphrase</source> <translation>Nieuw wachtwoord</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Herhaal nieuw wachtwoord</translation> </message> <message> <source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source> <translation>Dient om de triviale sendmoney uit te schakelen wanneer het OS account in gevaar is gebracht. 
Geeft geen echte veiligheid.</translation> </message> <message> <source>For anonymization and staking only</source> <translation>Alleen voor anonimisering en staking</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Voer een nieuw wachtwoord in voor uw portemonnee.&lt;br/&gt;Gebruik een wachtwoord van &lt;b&gt;tien of meer willekeurige karakters&lt;/b&gt;, of &lt;b&gt;acht of meer woorden&lt;/b&gt;.</translation> </message> <message> <source>Encrypt wallet</source> <translation>Versleutel portemonnee</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te openen.</translation> </message> <message> <source>Unlock wallet</source> <translation>Open portemonnee</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te ontsleutelen</translation> </message> <message> <source>Decrypt wallet</source> <translation>Ontsleutel portemonnee</translation> </message> <message> <source>Change passphrase</source> <translation>Wijzig wachtwoord</translation> </message> <message> <source>Enter the old and new passphrase to the wallet.</source> <translation>Voer het oude en nieuwe wachtwoord voor uw portemonnee in.</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Bevestig versleuteling van de portemonnee</translation> </message> <message> <source>Ccbc will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your CCBCs from being stolen by malware infecting your computer.</source> <translation>Ccbc zal nu afsluiten om het versleutelingsproces te voltooien. Onthoud dat het versleutelen van uw portemonnee u niet volledig kan beschermen: Malware kan uw computer infecteren en uw CCBCs stelen.</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Weet u zeker dat u uw portemonnee wilt versleutelen?</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR CCBC&lt;/b&gt;!</source> <translation>Waarschuwing: Als u uw portemonnee versleutelt en uw wachtwoord vergeet, zult u &lt;b&gt;AL UW CCBC VERLIEZEN&lt;/b&gt;!</translation> </message> <message> <source>Wallet encrypted</source> <translation>Portemonnee versleuteld</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>BELANGRIJK: Elke eerder gemaakte backup van uw portemonneebestand dient te worden vervangen door het nieuw gegenereerde, versleutelde portemonneebestand. Om veiligheidsredenen zullen eerdere backups van het niet-versleutelde portemonneebestand onbruikbaar worden zodra u uw nieuwe, versleutelde, portemonnee begint te gebruiken.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Portemonneeversleuteling mislukt</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Portemonneeversleuteling mislukt door een interne fout. 
Uw portemonnee is niet versleuteld.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>De opgegeven wachtwoorden komen niet overeen.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Portemonnee openen mislukt</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Het opgegeven wachtwoord voor de portemonnee-ontsleuteling is niet correct.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Portemonnee-ontsleuteling mislukt</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Portemonneewachtwoord is met succes gewijzigd.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Waarschuwing: De Caps-Lock-toets staat aan!</translation> </message> </context> <context> <name>Bip38ToolDialog</name> <message> <source>BIP 38 Tool</source> <translation>BIP 38 Tool</translation> </message> <message> <source>&amp;BIP 38 Encrypt</source> <translation>&amp;BIP 38 Versleuteling</translation> </message> <message> <source>Enter a Ccbc Address that you would like to encrypt using BIP 38. Enter a passphrase in the middle box. Press encrypt to compute the encrypted private key.</source> <translation>Voer een Ccbc adres in dat u wilt versleutelen met BIP 38. Voer een wachtwoord in, in het middelste veld. 
Druk op versleutelen om de versleutelde geheime sleutel te berekenen.</translation> </message> <message> <source>Address:</source> <translation>Adres:</translation> </message> <message> <source>The Ccbc address to sign the message with</source> <translation>Het Ccbc adres om het bericht met te ondertekenen</translation> </message> <message> <source>Choose previously used address</source> <translation>Kies een eerder gebruikt adres</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Plak adres vanaf klembord</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Passphrase: </source> <translation>Wachtwoord:</translation> </message> <message> <source>Encrypted Key:</source> <translation>Versleutelde sleutel:</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Kopieer de huidige handtekening naar het systeemklembord</translation> </message> <message> <source>Sign the message to prove you own this Ccbc address</source> <translation>Onderteken het bericht om te bewijzen dat u het Ccbc adres bezit</translation> </message> <message> <source>Encrypt &amp;Key</source> <translation>Versleutel &amp;Sleutel</translation> </message> <message> <source>Reset all sign message fields</source> <translation>Maak alle ondertekenvelden leeg</translation> </message> <message> <source>Clear &amp;All</source> <translation>Verwijder &amp;Alles</translation> </message> <message> <source>&amp;BIP 38 Decrypt</source> <translation>&amp;BIP 38 ontsleuteling</translation> </message> <message> <source>Enter the BIP 38 encrypted private key. Enter the passphrase in the middle box. Click Decrypt Key to compute the private key. 
After the key is decrypted, clicking 'Import Address' will add this private key to the wallet.</source> <translation>Voer de BIP 38 versleutelde geheime sleutel in. Voer het wachtwoord in, in het middelste veld. Druk op Ontsleutelen Sleutel om de geheime sleutel te berekenen. Nadat de geheime sleutel is ontsleuteld, zal het klikken op 'Adres Importeren' de geheime sleutel toevoegen aan de portemonnee.</translation> </message> <message> <source>The Ccbc address the message was signed with</source> <translation>Het Ccbc adres waarmee het bericht was ondertekend</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified Ccbc address</source> <translation>Controleer een bericht om te verifiëren dat het ondertekend is door het gespecificeerde Ccbc adres</translation> </message> <message> <source>Decrypt &amp;Key</source> <translation>Ontsleutelen Sleutel</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Maak alle verifiëren van het bericht velden leeg</translation> </message> <message> <source>Decrypted Key:</source> <translation>Ontsleutelde sleutel:</translation> </message> <message> <source>Import Address</source> <translation>Adres Importeren</translation> </message> <message> <source>Click "Decrypt Key" to compute key</source> <translation>Klik "Ontsleutelen Sleutel" om de sleutel te berekenen</translation> </message> <message> <source>The entered passphrase is invalid. 
</source> <translation>Het ingevoerde wachtwoord is ongeldig.</translation> </message> <message> <source>Allowed: 0-9,a-z,A-Z,</source> <translation>Toegestaan: 0-9,a-z,A-Z</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Het ingevoerde adres is ongeldig.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Controleer het adres en probeer het opnieuw.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Het opgegeven adres verwijst niet naar een sleutel.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Portemonnee-ontsleuteling is geannuleerd.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Geheime sleutel voor het ingevoerde adres is niet beschikbaar.</translation> </message> <message> <source>Failed to decrypt.</source> <translation>Ontsleutelen mislukt.</translation> </message> <message> <source>Please check the key and passphrase and try again.</source> <translation>Controleer de sleutel en het wachtwoord en probeer het opnieuw.</translation> </message> <message> <source>Data Not Valid.</source> <translation>Gegevens ongeldig.</translation> </message> <message> <source>Please try again.</source> <translation>Probeer het opnieuw.</translation> </message> <message> <source>Please wait while key is imported</source> <translation>Wacht tot de sleutel is geïmporteerd</translation> </message> <message> <source>Key Already Held By Wallet</source> <translation>Sleutel al aanwezig in portemonnee</translation> </message> <message> <source>Error Adding Key To Wallet</source> <translation>Fout bij het toevoegen van de sleutel</translation> </message> <message> <source>Successfully Added Private Key To Wallet</source> <translation>De geheime sleutel is toegevoegd aan de portemonnee</translation> </message> 
</context> <context> <name>BitcoinGUI</name> <message> <source>Wallet</source> <translation>Portemonnee</translation> </message> <message> <source>Node</source> <translation>Node</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Overzicht</translation> </message> <message> <source>Show general overview of wallet</source> <translation>Toon algemeen overzicht van de portemonnee</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Verzenden</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Ontvangen</translation> </message> <message> <source>&amp;Transactions</source> <translation>&amp;Transacties</translation> </message> <message> <source>Browse transaction history</source> <translation>Blader door transactiegescheidenis</translation> </message> <message> <source>E&amp;xit</source> <translation>&amp;Sluiten</translation> </message> <message> <source>Quit application</source> <translation>Programma afsluiten</translation> </message> <message> <source>About &amp;Qt</source> <translation>Over &amp;Qt</translation> </message> <message> <source>Show information about Qt</source> <translation>Toon informatie over Qt</translation> </message> <message> <source>&amp;Options...</source> <translation>&amp;Opties...</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>&amp;Tonen / Verbergen</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Toon of verberg het hoofdvenster</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>Versleutel portemonnee</translation> </message> <message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Versleutel de geheime sleutels die bij uw portemonnee horen</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>Backup Portemonnee...</translation> </message> <message> <source>Backup 
wallet to another location</source> <translation>Backup portemonnee naar een andere locatie</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>&amp;Wijzig Wachtwoord...</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Wijzig het wachtwoord voor uw portemonneversleuteling</translation> </message> <message> <source>&amp;Unlock Wallet...</source> <translation>&amp;Open portemonnee...</translation> </message> <message> <source>Unlock wallet</source> <translation>Open portemonnee</translation> </message> <message> <source>&amp;Lock Wallet</source> <translation>&amp;Sluit portemonnee</translation> </message> <message> <source>Sign &amp;message...</source> <translation>Bericht &amp;Ondertekenen...</translation> </message> <message> <source>&amp;Verify message...</source> <translation>Bericht &amp;Verifiëren... </translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Informatie</translation> </message> <message> <source>Show diagnostic information</source> <translation>Toon diagnostische informatie</translation> </message> <message> <source>&amp;Debug console</source> <translation>&amp;Debug console</translation> </message> <message> <source>Open debugging console</source> <translation>Open debugging console</translation> </message> <message> <source>&amp;Network Monitor</source> <translation>&amp;Netwerk Monitor</translation> </message> <message> <source>Show network monitor</source> <translation>Toon netwerk monitor</translation> </message> <message> <source>&amp;Peers list</source> <translation>&amp;Peers lijst</translation> </message> <message> <source>Show peers info</source> <translation>Toon informatie van peers</translation> </message> <message> <source>Wallet &amp;Repair</source> <translation>Portemonnee &amp;Repareren </translation> </message> <message> <source>Show wallet repair options</source> <translation>Toon portemonnee 
reparatie opties</translation> </message> <message> <source>Open configuration file</source> <translation>Open configuratie bestand</translation> </message> <message> <source>Show Automatic &amp;Backups</source> <translation>Toon Automatische &amp;Backups</translation> </message> <message> <source>Show automatically created wallet backups</source> <translation>Toon automatisch aangemaakte portemonnee backups</translation> </message> <message> <source>&amp;Sending addresses...</source> <translation>&amp;Verzendadressen...</translation> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation>Toon de lijst van verzendadressen en labels</translation> </message> <message> <source>&amp;Receiving addresses...</source> <translation>&amp;Ontvangstadressen...</translation> </message> <message> <source>Show the list of used receiving addresses and labels</source> <translation>Toon de lijst van ontvangstadressen en labels</translation> </message> <message> <source>Open &amp;URI...</source> <translation>Open &amp;URI...</translation> </message> <message> <source>&amp;Command-line options</source> <translation>&amp;Opdrachtregelopties</translation> </message> <message numerus="yes"> <source>Processed %n blocks of transaction history.</source> <translation><numerusform>%n blok van transactiegeschiedenis verwerkt.</numerusform><numerusform>%n blokken van transactiegeschiedenis verwerkt.</numerusform></translation> </message> <message> <source>Synchronizing additional data: %p%</source> <translation>Synchroniseren aanvullende gegevens: %p%</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Bestand</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp;Instellingen</translation> </message> <message> <source>&amp;Tools</source> <translation>&amp;Gereedschap</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Help</translation> </message> <message> 
<source>Tabs toolbar</source> <translation>Tabblad werkbalk</translation> </message> <message> <source>Ccbc Core</source> <translation>Ccbc Kern</translation> </message> <message> <source>Send coins to a Ccbc address</source> <translation>Verstuur munten naar een Ccbc adres</translation> </message> <message> <source>Request payments (generates QR codes and ccbc: URIs)</source> <translation>Vraag betaling aan (genereert QR-codes en Ccbc: URI's)</translation> </message> <message> <source>&amp;Privacy</source> <translation>&amp;Privacy</translation> </message> <message> <source>Privacy Action for zCCBC and Obfuscation</source> <translation>Privacy Actie voor zCCBC en verduistering</translation> </message> <message> <source>&amp;Masternodes</source> <translation>&amp;Masternodes</translation> </message> <message> <source>Browse masternodes</source> <translation>Bekijk masternodes</translation> </message> <message> <source>&amp;About Ccbc Core</source> <translation>&amp;Over Ccbc Kern</translation> </message> <message> <source>Show information about Ccbc Core</source> <translation>Toon informatie over Ccbc Kern</translation> </message> <message> <source>Modify configuration options for Ccbc</source> <translation>Wijzig Ccbc configuratie opties</translation> </message> <message> <source>Sign messages with your Ccbc addresses to prove you own them</source> <translation>Onderteken berichten met uw Ccbc adressen om te bewijzen dat u deze adressen bezit</translation> </message> <message> <source>Verify messages to ensure they were signed with specified Ccbc addresses</source> <translation>Controleer berichten om te verifiëren dat deze ondertekend zijn met de gespecificeerde Ccbc adressen</translation> </message> <message> <source>&amp;BIP38 tool</source> <translation>&amp;BIP38 tool</translation> </message> <message> <source>Encrypt and decrypt private keys using a passphrase</source> <translation>Versleutelen en ontsleutelen door middel van een 
wachtwoord/wachtzin</translation> </message> <message> <source>&amp;MultiSend</source> <translation>&amp;MultiSend</translation> </message> <message> <source>MultiSend Settings</source> <translation>MultiSend intellingen</translation> </message> <message> <source>Open Wallet &amp;Configuration File</source> <translation>Open Portemonnee &amp;Configuratiebestand</translation> </message> <message> <source>Open &amp;Masternode Configuration File</source> <translation>Open &amp;Masternode Configuratiebestand</translation> </message> <message> <source>Open Masternode configuration file</source> <translation>Open Masternode configuratiebestand</translation> </message> <message> <source>Open a Ccbc: URI or payment request</source> <translation>Open een Ccbc: URI of betaalverzoek</translation> </message> <message> <source>&amp;Blockchain explorer</source> <translation>&amp;Blockchain verkenner</translation> </message> <message> <source>Block explorer window</source> <translation>Block verkenner venster</translation> </message> <message> <source>Show the Ccbc Core help message to get a list with possible Ccbc command-line options</source> <translation>Toon het Ccbc Core help bericht om een ​​lijst te krijgen met mogelijke Ccbc command line opties</translation> </message> <message> <source>Ccbc Core client</source> <translation>Ccbc Core client</translation> </message> <message numerus="yes"> <source>%n active connection(s) to Ccbc network</source> <translation><numerusform>%n actieve verbinding(en) naar het Ccbc netwerk</numerusform><numerusform>%n actieve verbinding(en) naar het Ccbc netwerk</numerusform></translation> </message> <message> <source>Synchronizing with network...</source> <translation>Synchroniseren met het netwerk...</translation> </message> <message> <source>Importing blocks from disk...</source> <translation>Blocks aan het importeren vanaf schijf...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Blocks opnieuw 
aan het indexeren op de schijf...</translation> </message> <message> <source>No block source available...</source> <translation>Geen block bron beschikbaar...</translation> </message> <message> <source>Up to date</source> <translation>Bijgewerkt</translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation><numerusform>%n dag</numerusform><numerusform>%n dagen</numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation><numerusform>%n week</numerusform><numerusform>%n weken</numerusform></translation> </message> <message> <source>%1 and %2</source> <translation>%1 en %2</translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation><numerusform>%n jaar</numerusform><numerusform>%n jaren</numerusform></translation> </message> <message> <source>%1 behind</source> <translation>%1 achter</translation> </message> <message> <source>Catching up...</source> <translation>Aan het bijwerken...</translation> </message> <message> <source>Last received block was generated %1 ago.</source> <translation>Het laatst ontvangen block was %1 geleden gegenereerd.</translation> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Transacties hierna zijn nog niet zichtbaar.</translation> </message> <message> <source>Error</source> <translation>Error</translation> </message> <message> <source>Warning</source> <translation>Waarschuwing</translation> </message> <message> <source>Information</source> <translation>Informatie</translation> </message> <message> <source>Sent transaction</source> <translation>Verzonden transactie</translation> </message> <message> <source>Incoming transaction</source> <translation>Inkomende transactie</translation> </message> <message> <source>Sent MultiSend transaction</source> <translation>Verzonden MultiSend transactie</translation> </message> <message> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> 
<translation>Datum: %1 Hoeveelheid: %2 Type: %3 Adres: %4 </translation> </message> <message> <source>Staking is active MultiSend: %1</source> <translation>Staking is actief MultiSend: %1</translation> </message> <message> <source>Active</source> <translation>Actief</translation> </message> <message> <source>Not Active</source> <translation>Niet actief</translation> </message> <message> <source>Staking is not active MultiSend: %1</source> <translation>Staking is niet actief MultiSend: %1</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Portemonnee is &lt;b&gt;versleuteld&lt;/b&gt; en momenteel &lt;b&gt;geopend&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for anonimization and staking only</source> <translation>Portemonnee is &lt;b&gt;versleuteld&lt;/b&gt; en momenteel &lt;b&gt;geopend&lt;/b&gt; alleen voor anonimiteit en staking</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Portemonnee is &lt;b&gt;versleuteld&lt;/b&gt; en momenteel &lt;b&gt;vergrendeld&lt;/b&gt;</translation> </message> </context> <context> <name>BlockExplorer</name> <message> <source>Blockchain Explorer</source> <translation>Blockchain Verkenner</translation> </message> <message> <source>Back</source> <translation>Terug</translation> </message> <message> <source>Forward</source> <translation>Volgende</translation> </message> <message> <source>Address / Block / Transaction</source> <translation>Adres / Block / Transactie</translation> </message> <message> <source>Search</source> <translation>Zoeken</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Not all transactions will be shown. 
To view all transactions you need to set txindex=1 in the configuration file (ccbc.conf).</source> <translation>Niet alle transacties worden getoond. Om alle transacties te bekijken moet u txindex=1 instellen in het configuratie bestand (ccbc.conf).</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unknown: %5)</source> <translation>Totaal: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Onbekend: %5)</translation> </message> <message> <source>Network Alert</source> <translation>Netwerk waarschuwing</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Quantity:</source> <translation>Kwantiteit:</translation> </message> <message> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <source>Amount:</source> <translation>Hoeveelheid:</translation> </message> <message> <source>Priority:</source> <translation>Prioriteit:</translation> </message> <message> <source>Fee:</source> <translation>Kost:</translation> </message> <message> <source>Coin Selection</source> <translation>Munt Selectie</translation> </message> <message> <source>Dust:</source> <translation>Stof:</translation> </message> <message> <source>After Fee:</source> <translation>Na de kost:</translation> </message> <message> <source>Change:</source> <translation>Wijzig:</translation> </message> <message> <source>(un)select all</source> <translation>(on)selecteer alles</translation> </message> <message> <source>toggle lock state</source> <translation>Schakel lock state</translation> </message> <message> <source>Tree mode</source> <translation>Tree modus</translation> </message> <message> <source>List mode</source> <translation>Lijstmodus</translation> </message> <message> <source>(1 locked)</source> <translation>(1 opgesloten)</translation> </message> <message> <source>Amount</source> <translation>Hoeveelheid</translation> </message> <message> <source>Received with 
label</source> <translation>Ontvangen met label</translation> </message> <message> <source>Received with address</source> <translation>Ontvangen met adres</translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Confirmations</source> <translation>Bevestigingen</translation> </message> <message> <source>Confirmed</source> <translation>Bevestigd</translation> </message> <message> <source>Priority</source> <translation>Prioriteit</translation> </message> <message> <source>Copy address</source> <translation>Kopieer adres</translation> </message> <message> <source>Copy label</source> <translation>Kopieer label</translation> </message> <message> <source>Copy amount</source> <translation>Kopieer hoeveelheid</translation> </message> <message> <source>Copy transaction ID</source> <translation>Kopieer transactie ID</translation> </message> <message> <source>Lock unspent</source> <translation>Lock niet besteed</translation> </message> <message> <source>Unlock unspent</source> <translation>Unlock niet besteed</translation> </message> <message> <source>Copy quantity</source> <translation>Kopieer kwantiteit</translation> </message> <message> <source>Copy fee</source> <translation>Kopieer kost</translation> </message> <message> <source>Copy after fee</source> <translation>Kopieer na kost</translation> </message> <message> <source>Copy bytes</source> <translation>Kopieer bytes</translation> </message> <message> <source>Copy priority</source> <translation>Kopieer prioriteit</translation> </message> <message> <source>Copy dust</source> <translation>Kopieer dust</translation> </message> <message> <source>Copy change</source> <translation>Kopieer wisselgeld</translation> </message> <message> <source>Please switch to "List mode" to use this function.</source> <translation>Ga alsjeblieft over naar "Lijst modus" om deze functie te gebruiken.</translation> </message> <message> <source>highest</source> 
<translation>hoogste</translation> </message> <message> <source>higher</source> <translation>hoger</translation> </message> <message> <source>high</source> <translation>hoog</translation> </message> <message> <source>medium-high</source> <translation>medium-hoog</translation> </message> <message> <source>Can vary +/- %1 duff(s) per input.</source> <translation>Kan variëren van +/-%1 duff(s) per invoer.</translation> </message> <message> <source>medium</source> <translation>medium</translation> </message> <message> <source>low-medium</source> <translation>laag-medium</translation> </message> <message> <source>low</source> <translation>laag</translation> </message> <message> <source>lower</source> <translation>lager</translation> </message> <message> <source>lowest</source> <translation>laagst</translation> </message> <message> <source>none</source> <translation>geen</translation> </message> <message> <source>yes</source> <translation>ja</translation> </message> <message> <source>no</source> <translation>nee</translation> </message> <message> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation>Dit label wordt rood, als de transactiegrootte groter is dan 1000 bytes.</translation> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation>Dit betekent dat een honorarium van minstens %1 per kB vereist is.</translation> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation>Kan variëren van +/- 1 byte per invoer.</translation> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Transacties met hogere prioriteit zullen waarschijnlijk in een blok opgenomen worden.</translation> </message> <message> <source>This label turns red, if the priority is smaller than "medium".</source> <translation>Dit label wordt rood, als de prioriteit lager is dan "medium".</translation> 
</message> <message> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation>Dit label wordt rood, als een ontvanger een hoeveelheid kleiner dan %1 ontvangt.</translation> </message> <message> <source>(no label)</source> <translation>(geen label)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>wijzig van %1 (%2)</translation> </message> <message> <source>(change)</source> <translation>(wijzig)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Wijzig Adres</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Label</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>Het label dat geassocieerd wordt met deze adreslijstinvoer</translation> </message> <message> <source>&amp;Address</source> <translation>&amp;Adres</translation> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Het adres geassocieerd met deze adreslijst vermelding. 
Dit kan alleen worden gewijzigd voor het verzenden van adressen.</translation> </message> <message> <source>New receiving address</source> <translation>Nieuw ontvangst adres</translation> </message> <message> <source>New sending address</source> <translation>Nieuw verzendadres</translation> </message> <message> <source>Edit receiving address</source> <translation>Wijzig ontvangst adres</translation> </message> <message> <source>Edit sending address</source> <translation>Wijzig verzendadres</translation> </message> <message> <source>The entered address "%1" is not a valid Ccbc address.</source> <translation>Het ingevoerde adres: "%1" is geen geldig Ccbc adres.</translation> </message> <message> <source>The entered address "%1" is already in the address book.</source> <translation>Het ingevoerde adres: "%1" staat al in uw adresboek.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Kan portemonnee niet ontgrendelen.</translation> </message> <message> <source>New key generation failed.</source> <translation>Aanmaken nieuwe key mislukt.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Er wordt een nieuwe data map gemaakt.</translation> </message> <message> <source>name</source> <translation>naam</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Map bestaat alreeds. 
Voeg %1 toe als je van plan bent om hier een nieuwe map te creëren.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Pad bestaat al en is geen directory.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Kan de data directory hier niet aanmaken.</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>version</source> <translation>versie</translation> </message> <message> <source>Ccbc Core</source> <translation>Ccbc Kern</translation> </message> <message> <source>(%1-bit)</source> <translation>(%1-bit)</translation> </message> <message> <source>About Ccbc Core</source> <translation>Over Ccbc Core</translation> </message> <message> <source>Command-line options</source> <translation>Command-line opties</translation> </message> <message> <source>Usage:</source> <translation>Gebruik:</translation> </message> <message> <source>command-line options</source> <translation>command-line opties</translation> </message> <message> <source>UI Options:</source> <translation>UI Opties:</translation> </message> <message> <source>Set language, for example "de_DE" (default: system locale)</source> <translation>Stel taal in, bijvoorbeeld "de_DE" (standaard: systeem locale)</translation> </message> <message> <source>Start minimized</source> <translation>Geminimaliseerd starten</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation>Stel SSL root certificaten in voor betalingsverzoek (standaard: -systeem-)</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Welkom</translation> </message> <message> <source>Welcome to Ccbc Core.</source> <translation>Welkom bij Ccbc Core</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where Ccbc Core will store its 
data.</source> <translation>Aangezien dit de eerste keer is dat het programma is gestart, kun je kiezen waar Ccbc Core zijn gegevens opslaat.</translation> </message> <message> <source>Ccbc Core will download and store a copy of the Ccbc block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>Ccbc Core zal een kopie van de Ccbc blockchain downloaden en opslaan. Tenminste %1GB aan data zal worden opgeslagen in deze map en het zal over de tijd groeien. De portemonnee wordt ook in deze map opgeslagen.</translation> </message> <message> <source>Use the default data directory</source> <translation>Gebruik de standaard datafolder </translation> </message> <message> <source>Use a custom data directory:</source> <translation>Gebruik een aangepaste data directory:</translation> </message> <message> <source>Ccbc Core</source> <translation>Ccbc Kern</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Fout: opgegeven data directory "%1" kan niet worden gemaakt.</translation> </message> <message> <source>Error</source> <translation>Error</translation> </message> <message> <source>%1 GB of free space available</source> <translation>%1 GB beschikbare schijfruimte</translation> </message> <message> <source>(of %1 GB needed)</source> <translation>(van de %1 GB benodigd)</translation> </message> </context> <context> <name>MasternodeList</name> <message> <source>Form</source> <translation>Formulier</translation> </message> <message> <source>My Masternodes</source> <translation>Mijn Masternodes</translation> </message> <message> <source>Note: Status of your masternodes in local wallet can potentially be slightly incorrect.&lt;br /&gt;Always wait for wallet to sync additional data and then double check from another node&lt;br /&gt;if your node should be running but you still see "MISSING" in "Status" 
field.</source> <translation>Opmerking: De status van uw masternodes in de lokale portemonnee kan mogelijk iets afwijken. &lt;br /&gt;Wacht altijd op de portemonnee om additionele data te synchroniseren en verifieer dit vanuit een andere node &lt;br /&gt;mocht de node nu aan het draaien zijn maar je ziet nog steeds 'MISSING' in het veld 'Status'.</translation> </message> <message> <source>Alias</source> <translation>Alias</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Protocol</source> <translation>Protocol</translation> </message> <message> <source>Status</source> <translation>Status</translation> </message> <message> <source>Active</source> <translation>Actief</translation> </message> <message> <source>Last Seen (UTC)</source> <translation>Laatst gezien (UTC)</translation> </message> <message> <source>Pubkey</source> <translation>Pubkey</translation> </message> <message> <source>S&amp;tart alias</source> <translation>S&amp;tart alias</translation> </message> <message> <source>Start &amp;all</source> <translation>&amp;all starten</translation> </message> <message> <source>Start &amp;MISSING</source> <translation>Start &amp;ontbrekende</translation> </message> <message> <source>&amp;Update status</source> <translation>&amp;Status bijwerken</translation> </message> <message> <source>Status will be updated automatically in (sec):</source> <translation>Status wordt automatisch bijgewerkt in (sec):</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Start alias</source> <translation>Start alias</translation> </message> <message> <source>Confirm masternode start</source> <translation>Bevestig start masternode</translation> </message> <message> <source>Are you sure you want to start masternode %1?</source> <translation>Weet je zeker dat je masternode %1 wilt starten?</translation> </message> <message> <source>Confirm all masternodes 
start</source> <translation>Bevestig start alle masternodes</translation> </message> <message> <source>Are you sure you want to start ALL masternodes?</source> <translation>Weet je zeker dat je ALLE masternodes wilt starten?</translation> </message> <message> <source>Command is not available right now</source> <translation>Commando is nu niet beschikbaar</translation> </message> <message> <source>You can't use this command until masternode list is synced</source> <translation>Je kunt dit commando niet gebruiken tot de masternode lijst is gesynchroniseerd</translation> </message> <message> <source>Confirm missing masternodes start</source> <translation>Bevestig start ontbrekende masternodes</translation> </message> <message> <source>Are you sure you want to start MISSING masternodes?</source> <translation>Weet je zeker dat je alle ONTBREKENDE masternodes wilt starten?</translation> </message> </context> <context> <name>MultiSendDialog</name> <message> <source>MultiSend</source> <translation>MultiSend</translation> </message> <message> <source>Enter whole numbers 1 - 100</source> <translation>Geef volledige cijfers in 1-100</translation> </message> <message> <source>Enter % to Give (1-100)</source> <translation>Voer % in om te geven (1-100)</translation> </message> <message> <source>Enter Address to Send to</source> <translation>Vul het verzendadres in</translation> </message> <message> <source>MultiSend allows you to automatically send up to 100% of your stake or masternode reward to a list of other Ccbc addresses after it matures. To Add: enter percentage to give and Ccbc address to add to the MultiSend vector. To Delete: Enter address to delete and press delete. MultiSend will not be activated unless you have clicked Activate</source> <translation>MultiSend stelt je in staat om automatisch 100% van uw inzet of masternode-beloning automatisch te verzenden naar een lijst van andere Ccbc adressen nadat het volwassen is. 
Toevoegen: voer percentage in om te geven en Ccbc adres om toe te voegen aan de MultiSend vector. Verwijderen: Voer adres in om te verwijderen en druk op delete. MultiSend wordt niet geactiveerd, tenzij je op Activeer hebt geklikt</translation> </message> <message> <source>Add to MultiSend Vector</source> <translation>Aan MultiSend Vector toevoegen</translation> </message> <message> <source>Add</source> <translation>Toevoegen</translation> </message> <message> <source>Deactivate MultiSend</source> <translation>Deactiveer MultiSend</translation> </message> <message> <source>Deactivate</source> <translation>Deactiveren</translation> </message> <message> <source>Choose an address from the address book</source> <translation>Kies een adres uit het adresgids</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Percentage of stake to send</source> <translation>Stakingspercentage om te verzenden</translation> </message> <message> <source>Percentage:</source> <translation>Percentage:</translation> </message> <message> <source>Address to send portion of stake to</source> <translation>Adres om een ​​deel van inzet te verzenden naar</translation> </message> <message> <source>Address:</source> <translation>Adres:</translation> </message> <message> <source>Delete Address From MultiSend Vector</source> <translation>Verwijder adres van MultiSend Vector</translation> </message> <message> <source>Delete</source> <translation>Wissen</translation> </message> <message> <source>Activate MultiSend</source> <translation>Activeer MultiSend</translation> </message> <message> <source>Activate</source> <translation>Activeren</translation> </message> <message> <source>View MultiSend Vector</source> <translation>Bekijk MultiSend Vector</translation> </message> <message> <source>View MultiSend</source> <translation>Bekijk MultiSend</translation> </message> <message> <source>Send For Stakes</source> <translation>Verstuur voor 
inzetten</translation> </message> <message> <source>Send For Masternode Rewards</source> <translation>Verstuur voor Masternode beloningen</translation> </message> <message> <source>The entered address: </source> <translation>Het ingevoerde adres: </translation> </message> <message> <source> is invalid. Please check the address and try again.</source> <translation>is ongeldig. Controleer het adres alsjeblieft en probeer het opnieuw.</translation> </message> <message> <source>The total amount of your MultiSend vector is over 100% of your stake reward </source> <translation>De totale hoeveelheid van je MultiSend vector is meer dan 100% van je inzet beloning</translation> </message> <message> <source>Please Enter 1 - 100 for percent.</source> <translation>Vul alsjeblieft 1 - 100 voor procent in.</translation> </message> <message> <source>MultiSend Vector </source> <translation>MultiSend Vector </translation> </message> <message> <source>Removed </source> <translation>Verwijderd</translation> </message> <message> <source>Could not locate address </source> <translation>Kan het adres niet lokaliseren </translation> </message> </context> <context> <name>ObfuscationConfig</name> <message> <source>Configure Obfuscation</source> <translation>Configureer verduistering</translation> </message> <message> <source>Basic Privacy</source> <translation>Basis Privacy</translation> </message> <message> <source>High Privacy</source> <translation>Hoge Privacy</translation> </message> <message> <source>Maximum Privacy</source> <translation>Maximum Privacy</translation> </message> <message> <source>Please select a privacy level.</source> <translation>Selecteer de privacy level.</translation> </message> <message> <source>Use 2 separate masternodes to mix funds up to 10000 CCBC</source> <translation>Gebruik 2 aparte masternodes om fondsen te mixen tot 10000 CCBC</translation> </message> <message> <source>Use 8 separate masternodes to mix funds up to 10000 CCBC</source> <translation>Gebruik 8 
aparte masternodes om fondsen te mixen tot 10000 CCBC</translation> </message> <message> <source>Use 16 separate masternodes</source> <translation>Gebruik 16 aparte masternodes</translation> </message> <message> <source>This option is the quickest and will cost about ~0.025 CCBC to anonymize 10000 CCBC</source> <translation>Deze optie is het snelst en kost ongeveer ~0,025 CCBC om 10000 CCBC te anonimiseren</translation> </message> <message> <source>This option is moderately fast and will cost about 0.05 CCBC to anonymize 10000 CCBC</source> <translation>Deze optie is gematigd snel en kost ongeveer 0,05 CCBC om 10000 CCBC te anonimiseren</translation> </message> <message> <source>This is the slowest and most secure option. Using maximum anonymity will cost</source> <translation>Dit is de langzaamste en veiligste optie. Het gebruik van maximale anonimiteit kost</translation> </message> <message> <source>0.1 CCBC per 10000 CCBC you anonymize.</source> <translation>je anonimiseert 0,1 CCBC per 10000 CCBC.</translation> </message> <message> <source>Obfuscation Configuration</source> <translation>Verduistering configuratie</translation> </message> <message> <source>Obfuscation was successfully set to basic (%1 and 2 rounds). You can change this at any time by opening Ccbc's configuration screen.</source> <translation>Verduistering is succesvol ingesteld op basic (%1 en 2 rondes). Je kunt dit op elk gewenst moment wijzigen door het configuratiescherm van Ccbc te openen.</translation> </message> <message> <source>Obfuscation was successfully set to high (%1 and 8 rounds). You can change this at any time by opening Ccbc's configuration screen.</source> <translation>Verduistering is succesvol ingesteld op hoog (%1 en 8 rondes). U kunt dit op elk gewenst moment wijzigen door het configuratiescherm van Ccbc te openen.</translation> </message> <message> <source>Obfuscation was successfully set to maximum (%1 and 16 rounds). 
You can change this at any time by opening Ccbc's configuration screen.</source> <translation>Verduistering is succesvol ingesteld op maximum (%1 en 16 rondes). U kunt dit op elk gewenst moment wijzigen door het configuratiescherm van Ccbc te openen.</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation>URI openen</translation> </message> <message> <source>Open payment request from URI or file</source> <translation>Open betalingsverzoek van URI of bestand</translation> </message> <message> <source>URI:</source> <translation>URI:</translation> </message> <message> <source>Select payment request file</source> <translation>Selecteer betalingsverzoek bestand</translation> </message> <message> <source>Select payment request file to open</source> <translation>Selecteer betalingsverzoek bestand dat geopend moet worden</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Opties</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Algemeen</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>Grootte van cache &amp;gegevens</translation> </message> <message> <source>MB</source> <translation>MB</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Aantal script &amp;verificatie threads</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation>(0 = auto, &lt;0 = laat dat aantal cores vrij)</translation> </message> <message> <source>W&amp;allet</source> <translation>P&amp;ortemonnee</translation> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a transaction&lt;br/&gt;cannot be used until that transaction has at least one confirmation.&lt;br/&gt;This also affects how your balance is computed.</source> <translation>Als je de 
uitgaven van onbevestigde wijzigingen uitschakelt, de wijziging van een transactie&lt;br/&gt;kan niet worden gebruikt tot die transactie tenminste één bevestiging heeft.&lt;br/&gt;Dit beïnvloedt ook hoe uw saldo is berekend.</translation> </message> <message> <source>Automatically open the Ccbc client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Open de Ccbc client poort automatisch op de router. Dit werkt alleen als je router UPnP ondersteunt en het is ingeschakeld.</translation> </message> <message> <source>Accept connections from outside</source> <translation>Accepteer externe connecties</translation> </message> <message> <source>Allow incoming connections</source> <translation>Sta inkomende connecties toe</translation> </message> <message> <source>&amp;Connect through SOCKS5 proxy (default proxy):</source> <translation>&amp; Verbind via SOCKS5 proxy (standaard proxy):</translation> </message> <message> <source>Expert</source> <translation>Expert</translation> </message> <message> <source>Automatically start Ccbc after logging in to the system.</source> <translation>Ccbc automatisch opstarten na inloggen op het systeem.</translation> </message> <message> <source>&amp;Start Ccbc on system login</source> <translation>&amp;Ccbc starten bij systeemlogin</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation>Of je de munteenheidsfuncties wilt tonen of niet.</translation> </message> <message> <source>Enable coin &amp;control features</source> <translation>Munt- en controlefuncties inschakelen</translation> </message> <message> <source>Show additional tab listing all your masternodes in first sub-tab&lt;br/&gt;and all masternodes on the network in second sub-tab.</source> <translation>Toon extra tabblad waarin al je masternodes worden vermeld in het eerste sub-tabblad&lt;br/&gt;en alle masternodes op het netwerk in het tweede sub-tabblad.</translation> 
</message> <message> <source>Show Masternodes Tab</source> <translation>Toon Masternodes Tab</translation> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation>&amp; Besteed onbevestigd wisselgeld</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Netwerk</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting Ccbc.</source> <translation>De gebruikersinterface taal kan hier ingesteld worden. Deze instelling zal uitgevoerd na herstart van Ccbc.</translation> </message> <message> <source>Language missing or translation incomplete? Help contributing translations here: https://www.transifex.com/ccbc-project/ccbc-project-translations</source> <translation>Taal ontbreekt of vertaling onvolledig? Help om bij te dragen aan vertalingen: https://www.transifex.com/ccbc-project/ccbc-project-translations</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Map poort gebruikt &amp;UPnP</translation> </message> <message> <source>Percentage of incoming CCBC which get automatically converted to zCCBC via Zerocoin Protocol (min: 10%)</source> <translation>Percentage inkomende CCBC die automatisch wordt omgezet naar zCCBC via Zerocoin Protocol (min: 10%)</translation> </message> <message> <source>Percentage of autominted zCCBC</source> <translation>Percentage autominted zCCBC</translation> </message> <message> <source>Wait with automatic conversion to Zerocoin until enough CCBC for this denomination is available</source> <translation>Wacht met automatische omzetting naar Zerocoin totdat er genoeg CCBC voor deze denominatie beschikbaar is</translation> </message> <message> <source>Preferred Automint zCCBC Denomination</source> <translation>Gewenste Automint zCCBC denominatie</translation> </message> <message> <source>Connect to the Ccbc network through a SOCKS5 proxy.</source> <translation>Maak verbinding met 
het Ccbc netwerk via een SOCKS5 proxy.</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>Proxy &amp;IP:</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>IP adres van de proxy (bijvoorbeeld IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Poort:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Proxy-poort (v.b. 9050)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Venster</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Toon alleen een tray icoon na minimalisering van het venster.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp; Minimaliseer naar de tray in plaats van de taakbalk</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimaliseer in plaats van de applicatie te verlaten wanneer het venster wordt gesloten. Wanneer deze optie is ingeschakeld, wordt de applicatie alleen gesloten nadat in het menu Quit wordt gekozen.</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Minimaliseer bij sluiten</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Weergave</translation> </message> <message> <source>User Interface &amp;language:</source>
<translation>Gebruiksinterface &amp;taal:</translation> </message> <message> <source>User Interface Theme:</source> <translation>Gebruiksinterface thema:</translation> </message> <message> <source>&amp;Unit to show amounts in:</source> <translation>Toon &amp;Unit in volgende hoeveelheden:</translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Kies de standaard onderverdelingseenheid die in de interface wordt weergegeven en bij het verzenden van munten.</translation> </message> <message> <source>Decimal digits</source> <translation>Decimale cijfers</translation> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation>URL's van derden (bijvoorbeeld een blok verkenner) die in het tabblad transacties verschijnen als contextmenu items. %s in de URL wordt vervangen door transactie hash. 
Meerdere URL's worden gescheiden door verticale balk |.</translation> </message> <message> <source>Third party transaction URLs</source> <translation>Transactie URL's van derden</translation> </message> <message> <source>Active command-line options that override above options:</source> <translation>Actieve command line opties die bovenstaande opties overschrijven:</translation> </message> <message> <source>Reset all client options to default.</source> <translation>Reset alle client opties naar standaardinstellingen.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>&amp;Herstellings Opties</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Annuleren</translation> </message> <message> <source>I don't care</source> <translation>Het maakt me niet uit</translation> </message> <message> <source>default</source> <translation>standaard</translation> </message> <message> <source>none</source> <translation>geen</translation> </message> <message> <source>Confirm options reset</source> <translation>Bevestig reset instellingen</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Client herstart vereist om wijzigingen te activeren.</translation> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation>Client wordt uitgeschakeld, wil je doorgaan?</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Deze wijziging vereist een herstart van de client.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Het opgegeven proxyadres is ongeldig.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <source>Form</source> <translation>Formulier</translation> </message> <message> <source>CCBC Balances</source> 
<translation>CCBC Saldi</translation> </message> <message> <source>Available:</source> <translation>Beschikbaar:</translation> </message> <message> <source>Your current spendable balance</source> <translation>Uw huidige bestedingsruimte</translation> </message> <message> <source>Pending:</source> <translation>In afwachting:</translation> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Totaal aantal transacties die nog niet zijn bevestigd en nog niet meetellen in het uitgeefbare saldo</translation> </message> <message> <source>Immature:</source> <translation>Immatuur:</translation> </message> <message> <source>Staked or masternode rewards that has not yet matured</source> <translation>Staked of masternode beloningen die nog niet volwassen zijn</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Ccbc network after a connection is established, but this process has not completed yet.</source> <translation>De weergegeven informatie kan verouderd zijn. 
Je portemonnee synchroniseert automatisch met het Ccbc netwerk nadat een verbinding is opgezet, maar dit proces is nog niet afgerond.</translation> </message> <message> <source>Unconfirmed transactions to watch-only addresses</source> <translation>Onbevestigde transacties naar watch-only adressen</translation> </message> <message> <source>Staked or masternode rewards in watch-only addresses that has not yet matured</source> <translation>Staked of masternode beloningen in watch-only adressen die nog niet volwassen zijn</translation> </message> <message> <source>Total:</source> <translation>Totaal:</translation> </message> <message> <source>Your current total balance</source> <translation>Jouw huidige balanstotaal</translation> </message> <message> <source>Current total balance in watch-only addresses</source> <translation>Huidig ​​totaal saldo in watch-only adressen</translation> </message> <message> <source>Watch-only:</source> <translation>Watch-only:</translation> </message> <message> <source>Your current balance in watch-only addresses</source> <translation>Je huidige saldo in watch-only adressen</translation> </message> <message> <source>Spendable:</source> <translation>Uitgeefbaar:</translation> </message> <message> <source>Combined Balances (including immature coins)</source> <translation>Gecombineerde Saldi (inclusief onvolwassen munten)</translation> </message> <message> <source>CCBC:</source> <translation>CCBC:</translation> </message> <message> <source>zCCBC:</source> <translation>zCCBC:</translation> </message> <message> <source>0 %</source> <translation>0 %</translation> </message> <message> <source>Zerocoin Balance</source> <translation>Zerocoin saldo</translation> </message> <message> <source>Recent transactions</source> <translation>Recente transacties</translation> </message> <message> <source>out of sync</source> <translation>niet gesynchroniseerd</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>Payment 
request error</source> <translation>Fout bij betalingsverzoek</translation> </message> <message> <source>URI handling</source> <translation>URL behandeling</translation> </message> <message> <source>Payment request fetch URL is invalid: %1</source> <translation>Betalingsverzoek ophalen URL is ongeldig: %1</translation> </message> <message> <source>Payment request file handling</source> <translation>Betalingsaanvraag bestandsverwerking</translation> </message> <message> <source>Invalid payment address %1</source> <translation>Ongeldig betalingsadres %1</translation> </message> <message> <source>Cannot start ccbc: click-to-pay handler</source> <translation>Kan ccbc niet starten: click-to-pay handler</translation> </message> <message> <source>URI cannot be parsed! This can be caused by an invalid Ccbc address or malformed URI parameters.</source> <translation>URL kan niet ontleed worden! Dit kan worden veroorzaakt door een ongeldig Ccbc adres of misvormde URL parameters.</translation> </message> <message> <source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source> <translation>Uw betalingsverzoek kan niet worden gelezen! Dit kan worden veroorzaakt door een ongeldig betalingsverzoek bestand.</translation> </message> <message> <source>Payment request rejected</source> <translation>Betalingsverzoek afgewezen</translation> </message> <message> <source>Payment request network doesn't match client network.</source> <translation>Het betalingsverzoek netwerk komt niet overeen met het client netwerk.</translation> </message> <message> <source>Payment request has expired.</source> <translation>Betalingsverzoek is verlopen.</translation> </message> <message> <source>Payment request is not initialized.</source> <translation>Betalingsverzoek is niet geïnitialiseerd. 
</translation> </message> <message> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation>Niet geverifieerde betalingsverzoeken naar aangepaste betaal scripts worden niet ondersteund.</translation> </message> <message> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation>Gevraagd betalingsbedrag van %1 is te klein (beschouwd als dust).</translation> </message> <message> <source>Refund from %1</source> <translation>Terugbetaling van %1</translation> </message> <message> <source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source> <translation>Betalingsverzoek %1 is te groot (%2 bytes, toegestaan %3 bytes).</translation> </message> <message> <source>Payment request DoS protection</source> <translation>Betalingsverzoek DoS bescherming</translation> </message> <message> <source>Error communicating with %1: %2</source> <translation>Fout communiceren met %1: %2</translation> </message> <message> <source>Payment request cannot be parsed!</source> <translation>Betalingsaanvraag kan niet worden geanalyseerd!</translation> </message> <message> <source>Bad response from server %1</source> <translation>Bad response van server %1</translation> </message> <message> <source>Network request error</source> <translation>Netwerkverzoek fout</translation> </message> <message> <source>Payment acknowledged</source> <translation>Betaling erkend</translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>Address/Hostname</source> <translation>Adres/Hostnaam</translation> </message> <message> <source>Version</source> <translation>Versie</translation> </message> <message> <source>Ping Time</source> <translation>Pingtijd</translation> </message> </context> <context> <name>PrivacyDialog</name> <message> <source>CCBC which were anonymized via Zerocin Protocol</source> <translation>CCBC die via het Zerocoin Protocol geanonimiseerd werden</translation> 
</message> <message> <source>Zerocoin CCBC (zCCBC)</source> <translation>Zerocoin CCBC (zCCBC)</translation> </message> <message> <source>Zerocoin Actions:</source> <translation>Zerocoin Acties:</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Ccbc network after a connection is established, but this process has not completed yet.</source> <translation>De weergegeven informatie kan verouderd zijn. Je portemonnee synchroniseert automatisch met het Ccbc netwerk nadat een verbinding is opgezet, maar dit proces is nog niet afgerond.</translation> </message> <message> <source>Enter an amount of Phr to convert to zPhr</source> <translation>Vul de hoeveelheid in om Phr te converteren naar zPhr</translation> </message> <message> <source>Mint Zerocoin</source> <translation>Mint Zerocoin</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>zCCBC</source> <translation>zCCBC</translation> </message> <message> <source>Available for Minting:</source> <translation>Beschikbaar voor minting:</translation> </message> <message> <source>0.000 000 00 CCBC</source> <translation>0.000 000 00 CCBC</translation> </message> <message> <source>Reset Zerocoin Wallet DB. Deletes transactions that did not make it into the blockchain.</source> <translation>Zerocoin Wallet DB opnieuw instellen. 
Verwijdert transacties die het niet in de blockchain hebben gered.</translation> </message> <message> <source>Reset</source> <translation>Resetten</translation> </message> <message> <source>Coin Control...</source> <translation>Munt controle...</translation> </message> <message> <source>Quantity:</source> <translation>Kwantiteit:</translation> </message> <message> <source>Amount:</source> <translation>Hoeveelheid:</translation> </message> <message> <source>Rescan the complete blockchain for Zerocoin mints and their meta-data.</source> <translation>Rescan de complete blockchain voor Zerocoin mints en hun meta-data.</translation> </message> <message> <source>ReScan</source> <translation>ReScan</translation> </message> <message> <source>Status and/or Mesages from the last Mint Action.</source> <translation>Status en/of Berichten van de laatste mint actie.</translation> </message> <message> <source>zPhr Control</source> <translation>zPhr controle</translation> </message> <message> <source>zPhr Selected:</source> <translation>zPhr geselecteerd:</translation> </message> <message> <source>Quantity Selected:</source> <translation>Hoeveelheid geselecteerd:</translation> </message> <message> <source>Spend Zerocoin. Without 'Pay To:' address creates payments to yourself.</source> <translation>Zerocoin besteden. Zonder 'Betaal aan:' adres ontstaan betalingen aan jezelf.</translation> </message> <message> <source>Spend Zerocoin</source> <translation>Besteed Zerocoin</translation> </message> <message> <source>Available Balance:</source> <translation>Beschikbaar saldo:</translation> </message> <message> <source>0 zCCBC</source> <translation>0 zCCBC</translation> </message> <message> <source>Security Level for Zerocoin Transactions. More is better, but needs more time and resources.</source> <translation>Beveiligingsniveau voor Zerocoin transacties. 
Meer is beter, maar heeft meer tijd en middelen nodig.</translation> </message> <message> <source>Security Level:</source> <translation>Beveiligings niveau:</translation> </message> <message> <source>Security Level 1 - 100 (default: 42)</source> <translation>Beveiligingsniveau 1 - 100 (standaard: 42)</translation> </message> <message> <source>Pay &amp;To:</source> <translation>Betaal &amp;Naar:</translation> </message> <message> <source>The Ccbc address to send the payment to. Creates local payment to yourself when empty.</source> <translation>Het Ccbc adres om de betaling naar te verzenden. Maakt lokale betaling aan jezelf als je leeg bent.</translation> </message> <message> <source>Choose previously used address</source> <translation>Kies een eerder gebruikt adres</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Plak adres vanaf klembord</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Label:</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Vul een label in voor dit adres om deze toe te voegen aan de lijst met gebruikte adressen</translation> </message> <message> <source>A&amp;mount:</source> <translation>&amp;Hoeveelheid:</translation> </message> <message> <source>Convert Change to Zerocoin (might cost additional fees)</source> <translation>Zet wisselgeld om naar Zerocoin (mogelijk extra kosten)</translation> </message> <message> <source>If checked, the wallet tries to minimize the returning change instead of minimizing the number of spent denominations.</source> <translation>Indien aangevinkt, probeert de portemonnee het terugkerende wisselgeld te minimaliseren in plaats van het aantal uitgegeven denominaties te minimaliseren.</translation> 
</message> <message> <source>Minimize Change</source> <translation>Minimaliseer wisselgeld</translation> </message> <message> <source>Information about the available Zerocoin funds.</source> <translation>Informatie over de beschikbare Zerocoin fondsen.</translation> </message> <message> <source>Zerocoin Stats:</source> <translation>Zerocoin Statistieken:</translation> </message> <message> <source>Available Funds</source> <translation>Beschikbare Fondsen</translation> </message> <message> <source>Available Zerocoin Balance:</source> <translation>Beschikbaar Zerocoin saldo:</translation> </message> <message> <source>Denominations with value 1:</source> <translation>Denominaties met waarde 1:</translation> </message> <message> <source>Denom. with value 1:</source> <translation>Denom. met waarde 1:</translation> </message> <message> <source>0 x</source> <translation> 0 x</translation> </message> <message> <source>Denominations with value 5:</source> <translation>Denominaties met waarde 5:</translation> </message> <message> <source>Denom. with value 5:</source> <translation>Denom. met waarde 5:</translation> </message> <message> <source>Denominations with value 10:</source> <translation>Denominaties met waarde 10:</translation> </message> <message> <source>Denom. with value 10:</source> <translation>Denom. met waarde 10:</translation> </message> <message> <source>Denominations with value 50:</source> <translation>Denominaties met waarde 50:</translation> </message> <message> <source>Denom. with value 50:</source> <translation>Denom. met waarde 50:</translation> </message> <message> <source>Denominations with value 100:</source> <translation>Denominaties met waarde 100:</translation> </message> <message> <source>Denom. with value 100:</source> <translation>Denom. met waarde 100:</translation> </message> <message> <source>Denominations with value 500:</source> <translation>Denominaties met waarde 500:</translation> </message> <message> <source>Denom. 
with value 500:</source> <translation>Denom. met waarde 500:</translation> </message> <message> <source>Denominations with value 1000:</source> <translation>Denominaties met waarde 1000:</translation> </message> <message> <source>Denom. with value 1000:</source> <translation>Denom. met waarde 1000:</translation> </message> <message> <source>Denominations with value 5000:</source> <translation>Denominaties met waarde 5000:</translation> </message> <message> <source>Denom. with value 5000:</source> <translation>Denom. met waarde 5000:</translation> </message> <message> <source>Note: This hidden objects are used for communication between the 'Coin Control' dialog and its parent objects. We don't want to change/reimplement the existing Coin Control, so this objects must be here, even when we don't use them. No, we don't like this approach, either.</source> <translation>Opmerking: Deze verborgen objecten worden gebruikt voor communicatie tussen de 'Munt Controle' dialoog en de parent objecten. We willen de bestaande Munt Controle niet veranderen/opnieuw implementeren, dus deze objecten moeten hier zijn, zelfs als we ze niet gebruiken. 
Nee, we houden ook niet van deze aanpak.</translation> </message> <message> <source>Priority:</source> <translation>Prioriteit:</translation> </message> <message> <source>Fee:</source> <translation>Kost:</translation> </message> <message> <source>Dust:</source> <translation>Stof:</translation> </message> <message> <source>no</source> <translation>nee</translation> </message> <message> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <source>Insufficient funds!</source> <translation>Onvoldoende saldo!</translation> </message> <message> <source>Coins automatically selected</source> <translation>Munten automatisch geselecteerd</translation> </message> <message> <source>medium</source> <translation>medium</translation> </message> <message> <source>Coin Control Features</source> <translation>Munt controle kenmerken</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Als dit geactiveerd is, maar het wisselgeld adres is leeg of ongeldig, wordt het wisselgeld verzonden naar een nieuw gegenereerd adres.</translation> </message> <message> <source>Custom change address</source> <translation>Aangepast wisselgeld adres</translation> </message> <message> <source>Amount After Fee:</source> <translation>Bedrag na kosten:</translation> </message> <message> <source>Change:</source> <translation>Wijzig:</translation> </message> <message> <source>out of sync</source> <translation>niet gesynchroniseerd</translation> </message> <message> <source>Mint Status: Okay</source> <translation>Mint status: Oké</translation> </message> <message> <source>Copy quantity</source> <translation>Kopieer kwanititeit</translation> </message> <message> <source>Copy amount</source> <translation>Kopieer hoeveelheid</translation> </message> <message> <source>Zerocoin functionality is not enabled on the Ccbc network yet.</source> <translation>Zerocoin 
functionaliteit is nog niet ingeschakeld op het Ccbc netwerk.</translation> </message> <message> <source>Error: Your wallet is locked. Please enter the wallet passphrase first.</source> <translation>Fout: Je portemonnee is vergrendeld. Voer alsjeblieft de wachtwoord zin voor de portemonnee in.</translation> </message> <message> <source>Message: Enter an amount &gt; 0.</source> <translation>Bericht: voer een bedrag in &gt; 0.</translation> </message> <message> <source>Minting </source> <translation>Minting </translation> </message> <message> <source>Successfully minted </source> <translation>Succesvol gemint</translation> </message> <message> <source> zCCBC in </source> <translation> zCCBC in </translation> </message> <message> <source> sec. Used denominations: </source> <translation>sec. gebruikte denominaties: </translation> </message> <message> <source>Starting ResetMintZerocoin: rescanning complete blockchain, this will need up to 30 minutes depending on your hardware. Please be patient...</source> <translation>Start ResetMintZerocoin: rescanning complete blockchain, dit zal tot 30 minuten nodig hebben, afhankelijk van uw hardware. Wees alsjeblieft geduldig...</translation> </message> <message> <source>Duration: </source> <translation>Duur:</translation> </message> <message> <source> sec. </source> <translation> sec. 
</translation> </message> <message> <source>Starting ResetSpentZerocoin: </source> <translation>Starten van ResetSpentZerocoin: </translation> </message> <message> <source>No 'Pay To' address provided, creating local payment</source> <translation>Geen 'Betaal aan' adres verstrekt, lokale betaling wordt gemaakt</translation> </message> <message> <source>Invalid Ccbc Address</source> <translation>Ongeldig Ccbc addres</translation> </message> <message> <source>Invalid Send Amount</source> <translation>Ongeldig verzend bedrag</translation> </message> <message> <source>Confirm additional Fees</source> <translation>Bevestig extra kosten</translation> </message> <message> <source>Are you sure you want to send?&lt;br /&gt;&lt;br /&gt;</source> <translation>Weet je zeker dat je wilt verzenden?&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source> to address </source> <translation>naar adres</translation> </message> <message> <source> to a newly generated (unused and therefor anonymous) local address &lt;br /&gt;</source> <translation>naar een nieuw gegenereerd (ongebruikt en dus anoniem) lokaal adres&lt;br /&gt;</translation> </message> <message> <source>with Security Level </source> <translation>met beveiligingsniveau</translation> </message> <message> <source>Confirm send coins</source> <translation>Bevestig verzending coins</translation> </message> <message> <source>Spending Zerocoin. Computationally expensive, might need several minutes depending on the selected Security Level and your hardware. Please be patient...</source> <translation>Zerocoin besteden. Computationeel duur, zou mogelijk enkele minuten nodig hebben, afhankelijk van het geselecteerde beveiligingsniveau en je hardware. 
Wees alsjeblieft geduldig...</translation> </message> <message> <source>Spend Zerocoin failed with status = </source> <translation>Bestede Zerocoin mislukt met status =</translation> </message> <message> <source>zPhr Spend #: </source> <translation>zPhr besteed #: </translation> </message> <message> <source>denomination: </source> <translation>denominatie:</translation> </message> <message> <source>serial: </source> <translation>serial: </translation> </message> <message> <source>Spend is 1 of : </source> <translation>Besteed is 1 of : </translation> </message> <message> <source>value out: </source> <translation>Waarde uit:</translation> </message> <message> <source>address: </source> <translation>adres:</translation> </message> <message> <source>zPhr Mint</source> <translation>zPhr Mint</translation> </message> <message> <source>Sending successful, return code: </source> <translation>Verzenden succesvol, retourcode:</translation> </message> <message> <source>txid: </source> <translation>txid: </translation> </message> <message> <source>fee: </source> <translation>kosten:</translation> </message> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Hoeveelheid</translation> </message> <message> <source>Enter a Ccbc address (e.g. %1)</source> <translation>Voer een Ccbc adres in (b.v. 
%1)</translation> </message> <message> <source>%1 d</source> <translation>%1 d</translation> </message> <message> <source>%1 h</source> <translation>%1 h</translation> </message> <message> <source>%1 m</source> <translation>%1 m</translation> </message> <message> <source>%1 s</source> <translation>%1 s</translation> </message> <message> <source>NETWORK</source> <translation>NETWERK</translation> </message> <message> <source>BLOOM</source> <translation>BLOOM</translation> </message> <message> <source>UNKNOWN</source> <translation>ONBEKEND</translation> </message> <message> <source>None</source> <translation>Geen</translation> </message> <message> <source>N/A</source> <translation>NB</translation> </message> <message> <source>%1 ms</source> <translation>%1 ms</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>&amp;Afbeelding opslaan...</translation> </message> <message> <source>&amp;Copy Image</source> <translation>&amp;Kopieer afbeelding</translation> </message> <message> <source>Save QR Code</source> <translation>QR code opslaan</translation> </message> <message> <source>PNG Image (*.png)</source> <translation>PNG Afbeelding(*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <source>Tools window</source> <translation>Tools venster</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Informatie</translation> </message> <message> <source>General</source> <translation>Algemeen</translation> </message> <message> <source>Name</source> <translation>Naam</translation> </message> <message> <source>Client name</source> <translation>Client naam</translation> </message> <message> <source>N/A</source> <translation>NB</translation> </message> <message> <source>Number of connections</source> <translation>Aantal connecties</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Open</translation> </message> 
<message> <source>Startup time</source> <translation>Opstarttijd</translation> </message> <message> <source>Network</source> <translation>Netwerk</translation> </message> <message> <source>Last block time</source> <translation>Laatste blocktijd</translation> </message> <message> <source>Debug log file</source> <translation>Debug logbestand</translation> </message> <message> <source>Using OpenSSL version</source> <translation>Gebruikt OpenSSL versie</translation> </message> <message> <source>Build date</source> <translation>Bouwdatum</translation> </message> <message> <source>Current number of blocks</source> <translation>Huidige blockaantal</translation> </message> <message> <source>Client version</source> <translation>Client versie</translation> </message> <message> <source>Using BerkeleyDB version</source> <translation>Gebruikt BerkeleyDB versie</translation> </message> <message> <source>Block chain</source> <translation>Blockchain</translation> </message> <message> <source>Open the Ccbc debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Open het Ccbc debug log bestand uit de huidige data directory. 
Dit kan enkele seconden duren voor grote logbestanden.</translation> </message> <message> <source>Number of Masternodes</source> <translation>Aantal Masternodes</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Console</translation> </message> <message> <source>Clear console</source> <translation>Console leegmaken</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>&amp;Netwerk verkeer</translation> </message> <message> <source>&amp;Clear</source> <translation>&amp;Leegmaken</translation> </message> <message> <source>Totals</source> <translation>Totalen</translation> </message> <message> <source>Received</source> <translation>Ontvangen</translation> </message> <message> <source>Sent</source> <translation>Verstuurd</translation> </message> <message> <source>&amp;Peers</source> <translation>&amp;Peers</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Selecteer een peer om gedetailleerde informatie te bekijken.</translation> </message> <message> <source>Direction</source> <translation>Richting</translation> </message> <message> <source>Protocol</source> <translation>Protocol</translation> </message> <message> <source>Version</source> <translation>Versie</translation> </message> <message> <source>Services</source> <translation>Diensten</translation> </message> <message> <source>Starting Height</source> <translation>Start hoogte</translation> </message> <message> <source>Sync Height</source> <translation>Sync hoogte</translation> </message> <message> <source>Ban Score</source> <translation>Ban score</translation> </message> <message> <source>Connection Time</source> <translation>Connectietijd</translation> </message> <message> <source>Last Send</source> <translation>Laatst Verzonden</translation> </message> <message> <source>Last Receive</source> <translation>Laatst Ontvangen</translation> </message> <message> <source>Bytes Sent</source> 
<translation>Verzonden Bytes</translation> </message> <message> <source>Bytes Received</source> <translation>Ontvangen Bytes</translation> </message> <message> <source>Ping Time</source> <translation>Pingtijd</translation> </message> <message> <source>&amp;Wallet Repair</source> <translation>&amp;Portemonnee herstel</translation> </message> <message> <source>Wallet In Use:</source> <translation>Portemonnee in gebruik:</translation> </message> <message> <source>Salvage wallet</source> <translation>Red portemonnee</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat.</source> <translation>Poog om privé sleutels terug te halen uit een corrupte wallet.dat.</translation> </message> <message> <source>Rescan blockchain files</source> <translation>Herscan blockchain bestanden</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions.</source> <translation>Herscan de blockchain voor ontbrekende portemonnee transacties.</translation> </message> <message> <source>Recover transactions 1</source> <translation>Herstel transacties 1</translation> </message> <message> <source>Recover transactions from blockchain (keep meta-data, e.g. 
account owner).</source> <translation>Herstel transacties van blockchain (houd meta-data, bijvoorbeeld account eigenaar).</translation> </message> <message> <source>Recover transactions 2</source> <translation>Herstel transacties 2</translation> </message> <message> <source>Recover transactions from blockchain (drop meta-data).</source> <translation>Herstel transacties van blockchain (laat meta-data vallen).</translation> </message> <message> <source>Upgrade wallet format</source> <translation>Upgrade portemonnee format</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files.</source> <translation>Herstel blockchain index van huidige blk000??.dat bestanden.</translation> </message> <message> <source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source> <translation>De onderstaande knoppen zullen de portemonnee opnieuw opstarten met command line opties om de portemonnee te repareren, problemen op te lossen met corrupte blockchain bestanden of ontbrekende/verouderde transacties.</translation> </message> <message> <source>Wallet repair options.</source> <translation>Portemonnee herstelopties.</translation> </message> <message> <source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source> <translation>Upgrade portemonnee naar nieuwste format bij opstarten. 
(Opmerking: dit is NIET een update van de portemonnee zelf!)</translation> </message> <message> <source>Rebuild index</source> <translation>Herbouw index</translation> </message> <message> <source>In:</source> <translation>In:</translation> </message> <message> <source>Out:</source> <translation>Uit:</translation> </message> <message> <source>Welcome to the Ccbc RPC console.</source> <translation>Welkom bij de Ccbc RPC console.</translation> </message> <message> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Gebruik omhoog en omlaag pijlen om de geschiedenis te navigeren, en&lt;b&gt;Ctrl-L&lt;/b&gt;om scherm te wissen.</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Type &lt;b&gt;help &lt;/b&gt;voor een overzicht van beschikbare commando's.</translation> </message> <message> <source>%1 B</source> <translation>%1 B</translation> </message> <message> <source>%1 KB</source> <translation>%1 KB</translation> </message> <message> <source>%1 MB</source> <translation>%1 MB</translation> </message> <message> <source>%1 GB</source> <translation>%1 GB</translation> </message> <message> <source>via %1</source> <translation>via %1</translation> </message> <message> <source>never</source> <translation>nooit</translation> </message> <message> <source>Inbound</source> <translation>Inkomende</translation> </message> <message> <source>Outbound</source> <translation>Uitgaande</translation> </message> <message> <source>Unknown</source> <translation>Onbekend</translation> </message> <message> <source>Fetching...</source> <translation>Ophalen...</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>Reuse one of the previously used receiving addresses.&lt;br&gt;Reusing addresses has security and privacy issues.&lt;br&gt;Do not use this unless re-generating a payment request made 
before.</source> <translation>Hergebruik een van de eerder gebruikte ontvangstadressen.&lt;br&gt;Hergebruik van adressen heeft beveiligings- en privacyproblemen. &lt;br&gt;Gebruik dit niet tenzij u eerder een betalingsverzoek heeft aangemaakt.</translation> </message> <message> <source>R&amp;euse an existing receiving address (not recommended)</source> <translation>Hergebruik een bestaand ontvangstadres (niet aanbevolen)</translation> </message> <message> <source>&amp;Message:</source> <translation>&amp;Bericht:</translation> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation>Een optioneel label om te associëren met het nieuwe ontvangstadres.</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Ccbc network.</source> <translation>Een optioneel bericht dat aan het betalingsverzoek wordt gehecht, dat wordt weergegeven wanneer het verzoek wordt geopend. Opmerking: het bericht wordt niet verzonden met de betaling via het Ccbc netwerk.</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened.&lt;br&gt;Note: The message will not be sent with the payment over the Ccbc network.</source> <translation>Een optioneel bericht dat aan het betalingsverzoek wordt gehecht, dat wordt weergegeven wanneer het verzoek wordt geopend.&lt;br&gt;Opmerking: het bericht wordt niet verzonden met de betaling via het Ccbc netwerk.</translation> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Gebruik dit formulier om betalingen aan te vragen. 
Alle velden zijn&lt;b&gt;optioneel&lt;/b&gt;.</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Label:</translation> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Een optioneel bedrag om te vragen. Laat dit leeg of vul een nul in om geen specifiek bedrag te vragen.</translation> </message> <message> <source>&amp;Amount:</source> <translation>&amp;Hoeveelheid:</translation> </message> <message> <source>&amp;Request payment</source> <translation>&amp;Verzoek betaling</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Leeg alle velden van het formulier.</translation> </message> <message> <source>Clear</source> <translation>Leegmaken</translation> </message> <message> <source>Requested payments history</source> <translation>Betalingsverzoeken geschiedenis</translation> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Toon het geselecteerde verzoek (doet hetzelfde als dubbelklik op een item)</translation> </message> <message> <source>Show</source> <translation>Toon</translation> </message> <message> <source>Remove the selected entries from the list</source> <translation>Verwijder de geselecteerde vermeldingen uit de lijst</translation> </message> <message> <source>Remove</source> <translation>Verwijder</translation> </message> <message> <source>Copy label</source> <translation>Kopiëer label</translation> </message> <message> <source>Copy message</source> <translation>Bericht kopiëren</translation> </message> <message> <source>Copy amount</source> <translation>Kopieer hoeveelheid</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>QR Code</source> <translation>QR Code</translation> </message> <message> <source>Copy &amp;URI</source> <translation>Kopieer &amp;URI</translation> </message> 
<message> <source>Copy &amp;Address</source> <translation>Kopieer &amp;Adres</translation> </message> <message> <source>&amp;Save Image...</source> <translation>&amp;Afbeelding opslaan...</translation> </message> <message> <source>Request payment to %1</source> <translation>Verzoek betaling aan %1</translation> </message> <message> <source>Payment information</source> <translation>Betalingsinformatie</translation> </message> <message> <source>URI</source> <translation>URI</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Amount</source> <translation>Hoeveelheid</translation> </message> <message> <source>Label</source> <translation>Label</translation> </message> <message> <source>Message</source> <translation>Bericht</translation> </message> <message> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Resulterende URI te lang, probeer de tekst voor label/bericht te verminderen.</translation> </message> <message> <source>Error encoding URI into QR Code.</source> <translation>Fout bij het coderen van URI in QR-code.</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Label</source> <translation>Label</translation> </message> <message> <source>Message</source> <translation>Bericht</translation> </message> <message> <source>Amount</source> <translation>Hoeveelheid</translation> </message> <message> <source>(no label)</source> <translation>(geen label)</translation> </message> <message> <source>(no message)</source> <translation>(geen bericht)</translation> </message> <message> <source>(no amount)</source> <translation>(geen hoeveelheid)</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Send Coins</source> <translation>Verzend Munten</translation> </message> <message> <source>Coin 
Control Features</source> <translation>Munt controle kenmerken</translation> </message> <message> <source>Insufficient funds!</source> <translation>Onvoldoende saldo!</translation> </message> <message> <source>Quantity:</source> <translation>Kwantiteit:</translation> </message> <message> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <source>Amount:</source> <translation>Hoeveelheid:</translation> </message> <message> <source>Priority:</source> <translation>Prioriteit:</translation> </message> <message> <source>medium</source> <translation>medium</translation> </message> <message> <source>Fee:</source> <translation>Kost:</translation> </message> <message> <source>Dust:</source> <translation>Stof:</translation> </message> <message> <source>no</source> <translation>nee</translation> </message> <message> <source>After Fee:</source> <translation>Na de kost:</translation> </message> <message> <source>Change:</source> <translation>Wijzig:</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Als dit geactiveerd is, maar het wisselgeld adres is leeg of ongeldig, wordt het wisselgeld verzonden naar een nieuw gegenereerd adres.</translation> </message> <message> <source>Custom change address</source> <translation>Aangepast wisselgeld adres</translation> </message> <message> <source>Split UTXO</source> <translation>Split UTXO</translation> </message> <message> <source># of outputs</source> <translation># outputs</translation> </message> <message> <source>UTXO Size:</source> <translation>UTXO grootte:</translation> </message> <message> <source>0 CCBC</source> <translation>0 CCBC</translation> </message> <message> <source>Transaction Fee:</source> <translation>Transactiekosten: </translation> </message> <message> <source>Choose...</source> <translation>Kies...</translation> </message> <message> <source>collapse 
fee-settings</source> <translation>Klap kosten instellingen in</translation> </message> <message> <source>Minimize</source> <translation>Minimaliseer</translation> </message> <message> <source>per kilobyte</source> <translation>per kilobyte</translation> </message> <message> <source>total at least</source> <translation>totaal tenminste</translation> </message> <message> <source>(read the tooltip)</source> <translation>(lees de tooltip)</translation> </message> <message> <source>Custom:</source> <translation>Aangepast:</translation> </message> <message> <source>(Smart fee not initialized yet. This usually takes a few blocks...)</source> <translation>(Smart fee nog niet geïnitialiseerd. Dit duurt meestal een paar blokken ...)</translation> </message> <message> <source>Confirmation time:</source> <translation>Bevestigingstijd:</translation> </message> <message> <source>Open Coin Control...</source> <translation>Open munt controle...</translation> </message> <message> <source>Coins automatically selected</source> <translation>Munten automatisch geselecteerd</translation> </message> <message> <source>If the custom fee is set to 1000 uCCBCs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uCCBCs in fee,&lt;br /&gt;while "at least" pays 1000 uCCBCs. For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Als de aangepaste vergoeding is ingesteld op 1000 uCCBCs en de transactie is slechts 250 bytes, dan betaalt per kilobyte alleen 250 uCCBC's in vergoeding,&lt;br /&gt;terwijl "minstens" 1000 uCCBC's betaalt. Voor transacties die groter zijn dan een kilobyte, betalen beiden per kilobyte.</translation> </message> <message> <source>If the custom fee is set to 1000 uCCBCs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uCCBCs in fee,&lt;br /&gt;while "total at least" pays 1000 uCCBCs. 
For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Als de aangepaste vergoeding is ingesteld op 1000 uCCBCs en de transactie is slechts 250 bytes, dan betaalt per kilobyte alleen 250 uCCBC's in vergoeding,&lt;br /&gt;terwijl "totaal minstens" 1000 uCCBC's betaalt. Voor transacties die groter zijn dan een kilobyte, betalen beiden per kilobyte.</translation> </message> <message> <source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks.&lt;br /&gt;But be aware that this can end up in a never confirming transaction once there is more demand for Ccbc transactions than the network can process.</source> <translation>Alleen de minimale vergoeding betalen is prima, zolang er minder transactie volume is dan ruimte in de blokken. &lt;br /&gt;Maar wees ervan bewust dat dit kan leiden tot een nooit bevestigende transactie wanneer er meer vraag is naar Ccbc transacties dan het netwerk kan verwerken.</translation> </message> <message> <source>normal</source> <translation>normaal</translation> </message> <message> <source>fast</source> <translation>snel</translation> </message> <message> <source>Recommended</source> <translation>Aanbevolen</translation> </message> <message> <source>Send as zero-fee transaction if possible</source> <translation>Zend als zero-fee transactie indien mogelijk</translation> </message> <message> <source>(confirmation may take longer)</source> <translation>(bevestiging kan langer duren)</translation> </message> <message> <source>Confirm the send action</source> <translation>Bevestig de verzendactie</translation> </message> <message> <source>S&amp;end</source> <translation>V&amp;erzend</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Leeg alle velden van het formulier.</translation> </message> <message> <source>Clear &amp;All</source> <translation>Verwijder &amp;Alles</translation> </message> <message> <source>Send 
to multiple recipients at once</source> <translation>Verzend naar meerdere ontvangers tegelijk</translation> </message> <message> <source>Add &amp;Recipient</source> <translation>Voeg &amp;ontvanger toe</translation> </message> <message> <source>Anonymized CCBC</source> <translation>Geanonimiseerde CCBC</translation> </message> <message> <source>SwiftTX</source> <translation>SwiftTX</translation> </message> <message> <source>Balance:</source> <translation>Balans:</translation> </message> <message> <source>Copy quantity</source> <translation>Kopieer kwantiteit</translation> </message> <message> <source>Copy amount</source> <translation>Kopieer hoeveelheid</translation> </message> <message> <source>Copy fee</source> <translation>Kopiëer kost</translation> </message> <message> <source>Copy after fee</source> <translation>Kopiëer na kost</translation> </message> <message> <source>Copy bytes</source> <translation>Kopieer bytes</translation> </message> <message> <source>Copy priority</source> <translation>Kopieer prioriteit</translation> </message> <message> <source>Copy dust</source> <translation>Kopieer dust</translation> </message> <message> <source>Copy change</source> <translation>Kopieer wisselgeld</translation> </message> <message> <source>The split block tool does not work when sending to outside addresses. Try again.</source> <translation>Het split block tool werkt niet bij het verzenden naar externe adressen. Probeer het nog eens.</translation> </message> <message> <source>The split block tool does not work with multiple addresses. Try again.</source> <translation>Het split block tool werkt niet met meerdere adressen. 
Probeer het nog eens.</translation> </message> <message> <source>using</source> <translation>gebruikt</translation> </message> <message> <source>anonymous funds</source> <translation>anonieme financiën</translation> </message> <message> <source>Warning: Invalid Ccbc address</source> <translation>Waarschuwing: Ongeldig Ccbc adres</translation> </message> <message> <source>any available funds (not recommended)</source> <translation>elke beschikbare fonds (niet aanbevolen)</translation> </message> <message> <source>and SwiftTX</source> <translation>en SwiftTX</translation> </message> <message> <source>%1 to %2</source> <translation>%1 naar %2</translation> </message> <message> <source>Are you sure you want to send?</source> <translation>Ben je zeker dat je wilt verzenden?</translation> </message> <message> <source>are added as transaction fee</source> <translation>worden toegevoegd als transactiekosten</translation> </message> <message> <source>Total Amount = &lt;b&gt;%1&lt;/b&gt;&lt;br /&gt;= %2</source> <translation>Totale hoeveelheid = &lt;b&gt;%1 &lt;/b&gt;&lt;br /&gt;= %2</translation> </message> <message> <source>Confirm send coins</source> <translation>Bevestig verzending coins</translation> </message> <message> <source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source> <translation>Een vergoeding %1 keer hoger dan %2 per kB wordt beschouwd als een zwaar hoge vergoeding.</translation> </message> <message numerus="yes"> <source>Estimated to begin confirmation within %n block(s).</source> <translation><numerusform>Schatting om te beginnen met bevestiging in %n blok.</numerusform><numerusform>Schatting om te beginnen met bevestiging in %n blokken.</numerusform></translation> </message> <message> <source>The recipient address is not valid, please recheck.</source> <translation>Het ontvangstadres is niet geldig, controleer deze.</translation> </message> <message> <source> split into %1 outputs using the UTXO splitter.</source> 
<translation>gesplitst in %1 outputs met behulp van de UTXO splitter.</translation> </message> <message> <source>&lt;b&gt;(%1 of %2 entries displayed)&lt;/b&gt;</source> <translation>&lt;b&gt;(%1 van %2 vermeldingen weergegeven)</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Het te betalen bedrag moet groter zijn dan 0.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Het bedrag overschrijdt uw saldo.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Het totaal overschrijdt uw saldo wanneer de transactievergoeding %1 is inbegrepen.</translation> </message> <message> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Dubbel adres gevonden, kan alleen per keer per verzendoperatie naar elk adres versturen.</translation> </message> <message> <source>Transaction creation failed!</source> <translation>Transactie creatie mislukt!</translation> </message> <message> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>De transactie is afgewezen! Dit kan gebeuren als sommige munten in je portemonnee al waren uitgegeven, zoals als je een kopie van de wallet.dat en munten in de kopie waren besteed maar niet gemarkeerd zoals hier besteed.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins.</source> <translation>Fout: De portemonnee was alleen geopend om munten te anonimiseren.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins. Unlock canceled.</source> <translation>Fout: De portemonnee was alleen geopend om munten te anonimiseren. 
Ontgrendelen geannuleerd.</translation> </message> <message> <source>Pay only the minimum fee of %1</source> <translation>Betaal alleen de minimumkost van %1</translation> </message> <message> <source>Warning: Unknown change address</source> <translation>Waarschuwing: Onbekend wisselgeld adres</translation> </message> <message> <source>(no label)</source> <translation>(geen label)</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>This is a normal payment.</source> <translation>Dit is een normale betaling.</translation> </message> <message> <source>Pay &amp;To:</source> <translation>Betaal &amp;Naar:</translation> </message> <message> <source>The Ccbc address to send the payment to</source> <translation>Het Ccbc adres om de betaling naar te verzenden</translation> </message> <message> <source>Choose previously used address</source> <translation>Kies een eerder gebruikt adres</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Plak adres vanaf klembord</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Remove this entry</source> <translation>Verwijder dit item</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Label:</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Vul een label in voor dit adres om deze toe te voegen aan de lijst met gebruikte adressen</translation> </message> <message> <source>A&amp;mount:</source> <translation>&amp;Hoeveelheid:</translation> </message> <message> <source>Message:</source> <translation>Bericht:</translation> </message> <message> <source>A message that was attached to the Ccbc: URI which will be stored with the transaction for your reference. 
Note: This message will not be sent over the Ccbc network.</source> <translation>Een bericht dat is gehecht aan de Ccbc: URI die bij de transactie wordt opgeslagen voor uw referentie. Opmerking: dit bericht wordt niet verzonden via het Ccbc netwerk.</translation> </message> <message> <source>This is an unverified payment request.</source> <translation>Dit is een ongeverifieerde betalingsverzoek.</translation> </message> <message> <source>Pay To:</source> <translation>Betaal aan:</translation> </message> <message> <source>Memo:</source> <translation>Memo:</translation> </message> <message> <source>This is a verified payment request.</source> <translation>Dit is een geverifieerd betalingsverzoek.</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Voer een label in voor dit adres om het toe te voegen aan jouw adresboek</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>Ccbc Core is shutting down...</source> <translation>Ccbc Core is aan het afsluiten...</translation> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation>Sluit de computer niet af voordat dit venster verdwenen is.</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Signatures - Sign / Verify a Message</source> <translation>Handtekeningen - Onderteken / Verifieer een Bericht</translation> </message> <message> <source>&amp;Sign Message</source> <translation>&amp;Onderteken Bericht</translation> </message> <message> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Je kunt berichten met je adressen ondertekenen om te bewijzen dat je ze bezit. 
Wees voorzichtig om niets vaags te ondertekenen, omdat phishing aanvallen je misschien kunnen proberen om je identiteit over te geven. Teken alleen volledig gedetailleerde verklaringen aan waar je mee akkoord gaat.</translation> </message> <message> <source>The Ccbc address to sign the message with</source> <translation>Het Ccbc adres om het bericht met te ondertekenen</translation> </message> <message> <source>Choose previously used address</source> <translation>Kies een eerder gebruikt adres</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Plak adres vanaf klembord</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Enter the message you want to sign here</source> <translation>Voer hier het bericht in dat u wilt ondertekenen</translation> </message> <message> <source>Signature</source> <translation>Handtekening</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Kopieer de huidige handtekening naar het systeemklembord</translation> </message> <message> <source>Sign the message to prove you own this Ccbc address</source> <translation>Onderteken het bericht om te bewijzen dat u het Ccbc adres bezit</translation> </message> <message> <source>The Ccbc address the message was signed with</source> <translation>Het Ccbc adres waarmee het bericht was ondertekend</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified Ccbc address</source> <translation>Controleer een bericht om te verifiëren dat het ondertekend is door het gespecificeerde Ccbc adres</translation> </message> <message> <source>Sign &amp;Message</source> <translation>Onderteken &amp;Bericht</translation> </message> <message> <source>Reset all sign message fields</source> <translation>Maak alle ondertekenvelden 
leeg</translation> </message> <message> <source>Clear &amp;All</source> <translation>Verwijder &amp;Alles</translation> </message> <message> <source>&amp;Verify Message</source> <translation>&amp;Verifieer Bericht</translation> </message> <message> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Voer het ondertekening adres in, verifieer het bericht (zorg ervoor dat je line breaks, spaties, tabs, enz.) en onderteken hieronder om het bericht te verifiëren. Wees voorzichtig om niet meer in de handtekening te lezen dan in het ondertekende bericht zelf, om te voorkomen dat je door een man-in-de-middle aanval wordt getroffen.</translation> </message> <message> <source>Verify &amp;Message</source> <translation>Verifeer &amp;Message</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Maak alle verifiëren van het bericht velden leeg</translation> </message> <message> <source>Click "Sign Message" to generate signature</source> <translation>Klik op "Onderteken Bericht" om een handtekening te genereren</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Het ingevoerde adres is ongeldig.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Controleer het adres en probeer het opnieuw.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Het opgegeven adres verwijst niet naar een sleutel.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Portemonnee-ontsleuteling is geannuleerd.</translation> </message> <message> <source>Private key for the entered address is not available.</source> 
<translation>Geheime sleutel voor het ingevoerde adres is niet beschikbaar.</translation> </message> <message> <source>Message signing failed.</source> <translation>Bericht ondertekenen mislukt.</translation> </message> <message> <source>Message signed.</source> <translation>Bericht ondertekend.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>De handtekening kon niet gedecodeerd worden.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Controleer de handtekening en probeer het opnieuw.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>De handtekening kwam niet overeen met de berichtverdeling.</translation> </message> <message> <source>Message verification failed.</source> <translation>Bericht verificatie mislukt.</translation> </message> <message> <source>Message verified.</source> <translation>Bericht geverifieerd.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>Ccbc Core</source> <translation>Ccbc Kern</translation> </message> <message> <source>Version %1</source> <translation>Versie %1</translation> </message> <message> <source>The Bitcoin Core developers</source> <translation>De Bitcoin Kernontwikkelaars</translation> </message> <message> <source>The Dash Core developers</source> <translation>De Dash Kernontwikkelaars</translation> </message> <message> <source>The Ccbc Core developers</source> <translation>De Ccbc Kernontwikkelaars</translation> </message> <message> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>KB/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Klaar voor %n blok 
extra</numerusform><numerusform>Klaar voor %n meer blokken</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Open tot %1</translation> </message> <message> <source>conflicted</source> <translation>conflicteert</translation> </message> <message> <source>%1/offline (verified via swifttx)</source> <translation>%1/offline (geverifieerd via swifttx)</translation> </message> <message> <source>%1/confirmed (verified via swifttx)</source> <translation>%1/bevestigd (geverifieerd via swifttx)</translation> </message> <message> <source>%1 confirmations (verified via swifttx)</source> <translation>%1 confirmaties (geverifieerd via swifttx)</translation> </message> <message> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1/onbevestigd</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 bevestigingen</translation> </message> <message> <source>%1/confirmed (SwiftTX verification in progress - %2 of %3 signatures )</source> <translation>%1/bevestigd (SwiftTX verificatie in uitvoering - %2 van %3 handtekeningen)</translation> </message> <message> <source>%1 confirmations (SwiftTX verification in progress - %2 of %3 signatures)</source> <translation>%1 bevestigingen (SwiftTX verificatie in uitvoering - %2 van %3 handtekeningen)</translation> </message> <message> <source>%1/offline (SwiftTX verification failed)</source> <translation>%1/offline (SwiftTX verificatie mislukt)</translation> </message> <message> <source>%1/confirmed (SwiftTX verification failed)</source> <translation>%1/bevestigd (SwiftTX verificatie mislukt)</translation> </message> <message> <source>Status</source> <translation>Status</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, is nog niet succesvol uitgezonden</translation> </message> <message numerus="yes"> <source>, broadcast through %n 
node(s)</source> <translation><numerusform>, uitgezonden via %n node</numerusform><numerusform>, uitgezonden via %n nodes</numerusform></translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Source</source> <translation>Bron</translation> </message> <message> <source>Generated</source> <translation>Gegeneerd</translation> </message> <message> <source>From</source> <translation>Van</translation> </message> <message> <source>unknown</source> <translation>ongekend</translation> </message> <message> <source>To</source> <translation>Naar</translation> </message> <message> <source>own address</source> <translation>eigen adres</translation> </message> <message> <source>watch-only</source> <translation>watch-only</translation> </message> <message> <source>label</source> <translation>label</translation> </message> <message> <source>Credit</source> <translation>Credit</translation> </message> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation><numerusform>matureit in %n blok meer</numerusform><numerusform>maturiteit in %n meer blokken</numerusform></translation> </message> <message> <source>not accepted</source> <translation>niet geaccepteerd</translation> </message> <message> <source>Debit</source> <translation>Debet</translation> </message> <message> <source>Total debit</source> <translation>Totaal debet</translation> </message> <message> <source>Total credit</source> <translation>Totaal credit</translation> </message> <message> <source>Transaction fee</source> <translation>Transactiekosten</translation> </message> <message> <source>Net amount</source> <translation>Netto bedrag</translation> </message> <message> <source>Message</source> <translation>Bericht</translation> </message> <message> <source>Comment</source> <translation>Reactie</translation> </message> <message> <source>Transaction ID</source> <translation>Transactie ID</translation> </message> <message> <source>Output 
index</source> <translation>Output index</translation> </message> <message> <source>Merchant</source> <translation>Winkelier</translation> </message> <message> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Gegenereerde munten moeten %1 blokken rijpen voordat ze kunnen worden uitgegeven. Wanneer je dit blok hebt gegenereerd, wordt het naar het netwerk uitgezonden om aan de blockchain toegevoegd te worden. Als het niet in de keten komt, verandert de staat in "niet geaccepteerd" en zal het niet uitgeefbaar worden. Dit kan af en toe gebeuren als een andere node binnen een paar seconden van u een blok genereert.</translation> </message> <message> <source>Debug information</source> <translation>Debug informatie</translation> </message> <message> <source>Transaction</source> <translation>Transactie</translation> </message> <message> <source>Inputs</source> <translation>Inputs</translation> </message> <message> <source>Amount</source> <translation>Hoeveelheid</translation> </message> <message> <source>true</source> <translation>waar</translation> </message> <message> <source>false</source> <translation>onwaar</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>Transaction details</source> <translation>Transactiedetails</translation> </message> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Dit venster laat een gedetailleerde beschrijving van de transactie zien</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Datum</translation> </message> <message> 
<source>Type</source> <translation>Type</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Klaar voor %n blok extra</numerusform><numerusform>Klaar voor %n meer blokken</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Open tot %1</translation> </message> <message> <source>Offline</source> <translation>Offline</translation> </message> <message> <source>Unconfirmed</source> <translation>Onbevestigd</translation> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Bevestigen (%1 van %2 aanbevolen bevestigingen)</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> <translation>Bevestigd (%1 bevestigingen)</translation> </message> <message> <source>Conflicted</source> <translation>Geconflicteerd</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Onvolwassen (%1 bevestigingen, zullen beschikbaar zijn na %2)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Dit blok is niet ontvangen door andere noden en wordt waarschijnlijk niet geaccepteerd!</translation> </message> <message> <source>Received with</source> <translation>Ontvangen met</translation> </message> <message> <source>Masternode Reward</source> <translation>Masternode beloning</translation> </message> <message> <source>Received from</source> <translation>Ontvangen van</translation> </message> <message> <source>Received via Obfuscation</source> <translation>Verkregen via verduistering</translation> </message> <message> <source>Obfuscation Denominate</source> <translation>Verduistering denominatie</translation> </message> <message> <source>Obfuscation Collateral Payment</source> 
<translation>Verduistering zijdelingse betaling</translation> </message> <message> <source>Obfuscation Make Collateral Inputs</source> <translation>Verduistering maakt zijdelingse inputs</translation> </message> <message> <source>Obfuscation Create Denominations</source> <translation>Verduistering creëert denominaties</translation> </message> <message> <source>Sent to</source> <translation>Verzenden naar</translation> </message> <message> <source>Orphan Block - Generated but not accepted. This does not impact your holdings.</source> <translation>Orphan Block - Gegenereerd maar niet geaccepteerd. Dit heeft geen invloed op uw bezit.</translation> </message> <message> <source>Payment to yourself</source> <translation>Betaling naar jezelf</translation> </message> <message> <source>Minted</source> <translation>Minted</translation> </message> <message> <source>Mined</source> <translation>Mined</translation> </message> <message> <source>Obfuscated</source> <translation>Verduisterd</translation> </message> <message> <source>Converted Phr to zPhr</source> <translation>Geconverteerde Phr naar zPhr</translation> </message> <message> <source>Spent zPhr</source> <translation>Uitgegeven zPhr</translation> </message> <message> <source>Received Phr from zPhr</source> <translation>Verkregen Phr van zPhr</translation> </message> <message> <source>Minted Change as zPhr from zPhr Spend</source> <translation>Minted wisselgeld als zPhr van uitgegeven zPhr</translation> </message> <message> <source>Converted zPhr to Phr</source> <translation>Geconverteerde zPhr naar Phr</translation> </message> <message> <source>watch-only</source> <translation>watch-only</translation> </message> <message> <source>zPhr Accumulator</source> <translation>zPhr Accumulator</translation> </message> <message> <source>(n/a)</source> <translation>(n/b)</translation> </message> <message> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transactie status. 
Beweeg over dit veld om het aantal bevestigingen te tonen.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Datum en tijd waarop de transactie is ontvangen.</translation> </message> <message> <source>Type of transaction.</source> <translation>Type transactie.</translation> </message> <message> <source>Whether or not a watch-only address is involved in this transaction.</source> <translation>Of een watch-only adres wel of niet betrokken is bij deze transactie.</translation> </message> <message> <source>Destination address of transaction.</source> <translation>Bestemming adres van de transactie.</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Bedrag verwijderd uit of toegevoegd aan saldo.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Alle</translation> </message> <message> <source>Today</source> <translation>Vandaag</translation> </message> <message> <source>This week</source> <translation>Deze week</translation> </message> <message> <source>This month</source> <translation>Deze maand</translation> </message> <message> <source>Last month</source> <translation>Afgelopen maand</translation> </message> <message> <source>This year</source> <translation>Dit jaar</translation> </message> <message> <source>Range...</source> <translation>Omvang...</translation> </message> <message> <source>Most Common</source> <translation>Meest voorkomend</translation> </message> <message> <source>Received with</source> <translation>Ontvangen met</translation> </message> <message> <source>Sent to</source> <translation>Verzenden naar</translation> </message> <message> <source>Obfuscated</source> <translation>Verduisterd</translation> </message> <message> <source>Obfuscation Make Collateral Inputs</source> <translation>Verduistering maakt zijdelingse inputs</translation> </message> <message> 
<source>Obfuscation Create Denominations</source> <translation>Verduistering creëert denominaties</translation> </message> <message> <source>Obfuscation Denominate</source> <translation>Verduistering denominatie</translation> </message> <message> <source>Obfuscation Collateral Payment</source> <translation>Verduistering zijdelingse betaling</translation> </message> <message> <source>To yourself</source> <translation>Naar jezelf</translation> </message> <message> <source>Mined</source> <translation>Mined</translation> </message> <message> <source>Minted</source> <translation>Minted</translation> </message> <message> <source>Masternode Reward</source> <translation>Masternode beloning</translation> </message> <message> <source>Received Phr from zPhr</source> <translation>Verkregen Phr van zPhr</translation> </message> <message> <source>Zerocoin Mint</source> <translation>Zerocoin Mint</translation> </message> <message> <source>Zerocoin Spend</source> <translation>Zerocoin uitgegeven</translation> </message> <message> <source>Zerocoin Spend, Change in zPhr</source> <translation>Zerocoin uitgegeven, wisselgeld in zPhr</translation> </message> <message> <source>Zerocoin Spend to Self</source> <translation>Zerocoin uitgegeven aan jezelf</translation> </message> <message> <source>Other</source> <translation>Andere</translation> </message> <message> <source>Enter address or label to search</source> <translation>Adres of label invullen om te zoeken</translation> </message> <message> <source>Min amount</source> <translation>Minimale hoeveelheid</translation> </message> <message> <source>Copy address</source> <translation>Kopieer adres</translation> </message> <message> <source>Copy label</source> <translation>Kopiëer label</translation> </message> <message> <source>Copy amount</source> <translation>Kopieer hoeveelheid</translation> </message> <message> <source>Copy transaction ID</source> <translation>Kopier transactie ID</translation> </message> <message> <source>Edit 
label</source> <translation>Label wijzigen</translation> </message> <message> <source>Show transaction details</source> <translation>Bekijk transactiedetails</translation> </message> <message> <source>Export Transaction History</source> <translation>Exporteer Transactiegeschiedenis</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Kommagescheiden bestand (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Bevestigd</translation> </message> <message> <source>Watch-only</source> <translation>Watch-only</translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Type</source> <translation>Type</translation> </message> <message> <source>Label</source> <translation>Label</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>ID</source> <translation>ID</translation> </message> <message> <source>Exporting Failed</source> <translation>Export Mislukt</translation> </message> <message> <source>There was an error trying to save the transaction history to %1.</source> <translation>Er is een fout opgetreden om de transactiegeschiedenis te bewaren naar %1.</translation> </message> <message> <source>Exporting Successful</source> <translation>Exporteren succesvol</translation> </message> <message> <source>The transaction history was successfully saved to %1.</source> <translation>De transactiegeschiedenis is succesvol bewaard in %1.</translation> </message> <message> <source>Range:</source> <translation>Bereik:</translation> </message> <message> <source>to</source> <translation>naar</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> <message> <source>Unit to show amounts in. Click to select another unit.</source> <translation>Eenheid om bedragen te laten zien. 
Klik om een andere eenheid te selecteren.</translation> </message> </context> <context> <name>WalletFrame</name> <message> <source>No wallet has been loaded.</source> <translation>Er is geen portemonnee ingeladen.</translation> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Verzend Munten</translation> </message> <message> <source>SwiftTX doesn't support sending values that high yet. Transactions are currently limited to %1 CCBC.</source> <translation>SwiftTX ondersteunt geen verzendwaarden die zo hoog zijn. Transacties zijn momenteel beperkt tot %1 CCBC.</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation>&amp;Exporteren</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Exporteer de data in de huidige tab naar een bestand</translation> </message> <message> <source>Selected amount:</source> <translation>Geselecteerde hoeveelheid:</translation> </message> <message> <source>Backup Wallet</source> <translation>Backup portemonnee</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Portemonneegegevens (*.dat)</translation> </message> <message> <source>Backup Failed</source> <translation>Backup mislukt</translation> </message> <message> <source>There was an error trying to save the wallet data to %1.</source> <translation>Er is een fout opgetreden om de portemonnee data op te slaan naar %1.</translation> </message> <message> <source>Backup Successful</source> <translation>Backup Succesvol</translation> </message> <message> <source>The wallet data was successfully saved to %1.</source> <translation>De portemonneegegevens zijn succesvol opgeslagen op %1.</translation> </message> </context> <context> <name>ZPhrControlDialog</name> <message> <source>Select zPhr to Spend</source> <translation>Selecteer zPhr om te spenderen</translation> </message> 
<message> <source>Quantity</source> <translation>Hoeveelheid</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>zPhr</source> <translation>zPhr</translation> </message> <message> <source>Select/Deselect All</source> <translation>Selecteer/Deselecteer Alles</translation> </message> <message> <source>Is Spendable</source> <translation>Is Uitgeefbaar</translation> </message> </context> <context> <name>ccbc-core</name> <message> <source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source> <translation>(1 = houd tx meta data, bijvoorbeeld account eigenaar en betalingsverzoek informatie, 2 = drop tx meta data)</translation> </message> <message> <source>Allow JSON-RPC connections from specified source. Valid for &lt;ip&gt; are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source> <translation>Sta JSON-RPC connecties toe van de opgegeven bron. Geldig voor&lt;ip&gt;zijn een enkel IP (bijvoorbeeld 1.2.3.4), een netwerk/netmask (bijvoorbeeld 1.2.3.4/255.255.255.0) of een netwerk/CIDR (bijvoorbeeld 1.2.3.4/24). Deze optie kan meerdere keren worden opgegeven</translation> </message> <message> <source>An error occurred while setting up the RPC address %s port %u for listening: %s</source> <translation>Er is een fout opgetreden tijdens het instellen van het RPC adres %s poort %u voor het luisteren: %s</translation> </message> <message> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Bind naar het gegeven adres en luister er altijd naar. Gebruik [host]:poort notatie voor IPv6</translation> </message> <message> <source>Bind to given address and whitelist peers connecting to it. 
Use [host]:port notation for IPv6</source> <translation>Bind naar bepaald adres en whitelist peers die er verbinding mee maken. Gebruik [host]:poort notatie voor IPv6</translation> </message> <message> <source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source> <translation>Bind naar het opgegeven adres om te luisteren naar JSON-RPC verbindingen. Gebruik [host]:poort notatie voor IPv6. Deze optie kan meerdere keren worden opgegeven (standaard: bind aan alle interfaces)</translation> </message> <message> <source>Calculated accumulator checkpoint is not what is recorded by block index</source> <translation>Het berekende accumulatie controlepunt is niet wat wordt geregistreerd door de blokindex</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. Ccbc Core is probably already running.</source> <translation>Kan geen vergrendeling op data directory %s verkrijgen. Ccbc Core loopt waarschijnlijk al.</translation> </message> <message> <source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source> <translation>Verander automatisch gefinaliseerd budget voting gedrag. modus=auto: Stem enkel voor exact gefinaliseerde budget overeenkomend met mijn gegenereerde budget. 
(tekenreeks, standaard: auto)</translation> </message> <message> <source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:%u)</source> <translation>Doorlopend rate-limit gratis transacties naar&lt;n&gt;*1000 bytes per minuut (default:%u)</translation> </message> <message> <source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source> <translation>Maak nieuwe bestanden met systeem standaard permissies, in plaats van umask 077 (alleen effectief met gedeactiveerde wallet functionaliteit)</translation> </message> <message> <source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source> <translation>Verwijder alle portemonnee transacties en herstel alleen die delen van de blockchain via -rescan bij opstarten</translation> </message> <message> <source>Disable all Ccbc specific functionality (Masternodes, Obfuscation, SwiftTX, Budgeting) (0-1, default: %u)</source> <translation>Schakel alle Ccbc specifieke functionaliteit uit (Masternodes, Verduistering, SwiftTX, Budgeting) (0-1, standaard: %u)</translation> </message> <message> <source>Distributed under the MIT software license, see the accompanying file COPYING or &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</source> <translation>Gedistribueerd onder de MIT software licentie, zie het bijgevoegde bestand COPYING of &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation> </message> <message> <source>Enable spork administration functionality with the appropriate private key.</source> <translation>Activeer de spork administratie functionaliteit met de juiste private sleutel.</translation> </message> <message> <source>Enable swifttx, show confirmations for locked transactions (bool, default: %s)</source> <translation>Schakel swifttx in, laat bevestigingen zien voor gesloten transacties (bool, standaard: %s)</translation> 
</message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation>Voer de regressietest modus uit, die een speciale chain gebruikt waarin blokken direct kunnen worden opgelost.</translation> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %s)</source> <translation>Fout: het luisteren naar inkomende verbindingen is mislukt (luister terug fout %s)</translation> </message> <message> <source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source> <translation>Fout: Niet ondersteund argument -socks gevonden. Het instellen van SOCKS versie is niet meer mogelijk, alleen SOCKS5 proxy's worden ondersteund.</translation> </message> <message> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source> <translation>Uitvoeren commando wanneer een relevante waarschuwing is ontvangen of we zien een echt lange fork (%s in cmd wordt vervangen door bericht)</translation> </message> <message> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Uitvoeren commando wanneer een portemonnee transactie verandert (%s in cmd wordt vervangen door TxID)</translation> </message> <message> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Uitvoeren commando wanneer het beste blok verandert (%s in cmd is vervangen door block hash)</translation> </message> <message> <source>Fees (in CCBC/Kb) smaller than this are considered zero fee for relaying (default: %s)</source> <translation>Kosten (in CCBC/Kb) kleiner dan dit worden beschouwd als zero fee voor heruitzending (standaard: %s)</translation> </message> <message> <source>Fees (in CCBC/Kb) smaller than this are considered zero fee for transaction 
creation (default: %s)</source> <translation>Kosten (in CCBC/Kb) kleiner dan dit worden beschouwd als zero fee voor transactie verrichting (standaard: %s)</translation> </message> <message> <source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: %u)</source> <translation>Leeg database activiteit uit geheugen pool naar schijf log elke keer&lt;n&gt;megabytes (default: %u)</translation> </message> <message> <source>Found unconfirmed denominated outputs, will wait till they confirm to continue.</source> <translation>Gevonden onbevestigde gedenomineerde outputs, wachten tot ze bevestigd zijn om verder te gaan.</translation> </message> <message> <source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source> <translation>Als paytxfee niet is ingesteld, sluit voldoende kosten in, zodat transacties beginnen te confirmeren binnen gemiddeld n blokken (standaard: %u)</translation> </message> <message> <source>In this mode -genproclimit controls how many blocks are generated immediately.</source> <translation>In deze modus controleert -genproclimit hoeveel blokken er onmiddellijk worden gegenereerd.</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source> <translation>Ongeldige hoeveelheid voor -maxtxfee=&lt;amount&gt;: '%s' (moet ten minste de minimale vergoeding van %s zijn om hangende transacties te voorkomen)</translation> </message> <message> <source>Keep the specified amount available for spending at all times (default: 0)</source> <translation>Houd het gespecificeerde bedrag altijd beschikbaar voor uitgaven te allen tijde (standaard: 0)</translation> </message> <message> <source>Log transaction priority and fee per kB when mining blocks (default: %u)</source> <translation>Log transactie prioriteit en vergoeding per kB wanneer blokken worden 
gemined (standaard: %u)</translation> </message> <message> <source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source> <translation>Onderhoud een volledige transactie index, gebruikt door de getrawtransaction rpc call (standaard: %u)</translation> </message> <message> <source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source> <translation>Maximale gegevensgrootte in data carrier transacties die we relayen en minen (standaard: %u)</translation> </message> <message> <source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source> <translation>Maximale totale kosten die in een enkele portefeuille kunnen worden gebruikt, een te lage instelling kan grote transacties afbreken (standaard: %s)</translation> </message> <message> <source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source> <translation>Aantal seconden om te voorkomen dat misdragende peers opnieuw connectoren (standaard: %u)</translation> </message> <message> <source>Obfuscation uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source> <translation>Verduistering maakt gebruik van exacte gedenomineerde bedragen om fondsen te verzenden, je zou misschien nog wat munten moeten anonimiseren.</translation> </message> <message> <source>Output debugging information (default: %u, supplying &lt;category&gt; is optional)</source> <translation>Output debugging informatie (default: %u, verschaffen &lt;category&gt; is optioneel)</translation> </message> <message> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation>Query voor peer adressen via DNS lookup, als er weinig adressen zijn (standaard: 1 tenzij -connect)</translation> </message> <message> <source>Randomize credentials for every proxy connection. 
This enables Tor stream isolation (default: %u)</source> <translation>Willekeurige credentials voor elke proxy verbinding. Dit stelt Tor stream isolatie in staat (standaard: %u)</translation> </message> <message> <source>Require high priority for relaying free or low-fee transactions (default:%u)</source> <translation>Vereist hoge prioriteit voor het relayen van gratis of low-fee transacties (standaard: %u)</translation> </message> <message> <source>Send trace/debug info to console instead of debug.log file (default: %u)</source> <translation>Stuur trace/debug info naar console in plaats van debug.log bestand (standaard: %u)</translation> </message> <message> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation>Stel maximale grootte van transacties met hoge prioriteit/low-fee in bytes in (standaard: %d)</translation> </message> <message> <source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source> <translation>Stel het aantal script verificatie threads (%u tot %d, 0 = auto, &lt;0 = laat dat aantal kernen vrij, standaard: %d)</translation> </message> <message> <source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source> <translation>Stel het aantal threads voor munt generatie in indien geactiveerd (-1 = alle kernen, standaard: %d)</translation> </message> <message> <source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source> <translation>Toon N bevestigingen voor een succesvol opgesloten transactie (0-9999, standaard: %u)</translation> </message> <message> <source>Support filtering of blocks and transaction with bloom filters (default: %u)</source> <translation>Ondersteun filteren van blokken en transactie met bloom filters (standaard: %u)</translation> </message> <message> <source>SwiftTX requires inputs with at least 6 confirmations, you might need to wait a 
few minutes and try again.</source> <translation>SwiftTX vereist inputs met tenminste 6 bevestigingen, je dient wellicht een paar minuten te wachten en probeer het opnieuw.</translation> </message> <message> <source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit &lt;https://www.openssl.org/&gt; and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source> <translation>Dit product bevat software ontwikkeld door het OpenSSL Project voor gebruik in de OpenSSL Toolkit &lt;https://www.openssl.org/&gt; en cryptografische software geschreven door Eric Young en UPnP software geschreven door Thomas Bernard.</translation> </message> <message> <source>To use ccbcd, or the -server option to ccbc-qt, you must set an rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=ccbcrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s "Ccbc Alert" [email protected] </source> <translation>Om ccbcd of de -server optie om ccbc-qt te gebruiken, moet je een rpcpassword instellen in het configuratiebestand: %s Het is aan te raden het volgende willekeurige wachtwoord te gebruiken: rpcuser=ccbcrpc rpcpassword=%s (je hoeft dit wachtwoord niet te onthouden) De gebruikersnaam en het wachtwoord MOETEN NIET hetzelfde zijn. Als het bestand niet bestaat, creëer het dan met owner-readable-only bestandsrechten. Het is ook aan te raden om alertnotify in te stellen, zodat je van problemen op de hoogte wordt gebracht; bijvoorbeeld: alertnotify = echo %%s | mail -s "Ccbc Alert" [email protected] </translation> </message> <message> <source>Unable to bind to %s on this computer. 
Ccbc Core is probably already running.</source> <translation>Niet mogelijk te binden aan %s op deze computer. Ccbc Core loopt waarschijnlijk al.</translation> </message> <message> <source>Unable to locate enough Obfuscation denominated funds for this transaction.</source> <translation>Kan niet genoeg verduistering gedenomineerde fondsen voor deze transactie vinden.</translation> </message> <message> <source>Unable to locate enough Obfuscation non-denominated funds for this transaction that are not equal 10000 CCBC.</source> <translation>Kan niet genoeg verduistering niet gedenomineerde fondsen voor deze transactie vinden die niet gelijk zijn aan 10000 CCBC.</translation> </message> <message> <source>Unable to locate enough funds for this transaction that are not equal 10000 CCBC.</source> <translation>Kan niet genoeg fondsen voor deze transactie vinden die niet gelijk zijn aan 10000 CCBC.</translation> </message> <message> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source> <translation>Gebruik aparte SOCKS5 proxy om peers via Tor verborgen services te bereiken (standaard: %s)</translation> </message> <message> <source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source> <translation>Waarschuwing: -maxtxfee is zeer hoog ingesteld! Deze hoge kosten kunnen worden betaald op een enkele transactie.</translation> </message> <message> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Waarschuwing: -paytxfee is zeer hoog ingesteld! Dit zijn de transactie kosten die je betaalt als je een transactie verstuurt.</translation> </message> <message> <source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Ccbc Core will not work properly.</source> <translation>Waarschuwing: Controleer of de datum en tijd van je computer juist zijn! 
Als je klok verkeerd staat, werkt Ccbc Core niet goed.</translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Waarschuwing: het netwerk lijkt er niet helemaal mee eens te zijn! Sommige miners lijken problemen te ondervinden.</translation> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Waarschuwing: Wij lijken het er niet helemaal eens te zijn met onze peers! Mogelijk moet je upgraden, of andere nodes moeten mogelijk upgraden.</translation> </message> <message> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Waarschuwing: fout lezen wallet.dat! Alle sleutels lezen correct, maar transactie gegevens of adresboek invoeringen kunnen missen of niet correct zijn.</translation> </message> <message> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Waarschuwing: wallet.dat corrupt, data gered! Originele wallet.dat opgeslagen als wallet.{timestamp}.bak in %s; als je saldo of transacties onjuist zijn, moet je een back-up herstellen.</translation> </message> <message> <source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source> <translation>Whitelist peers verbinden van het opgegeven netmask of IP adres. Kan meerdere keren worden opgegeven.</translation> </message> <message> <source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. 
for a gateway</source> <translation>Whitelisted peers kunnen niet DoS banned worden en hun transacties worden altijd doorgestuurd, zelfs als ze al in de mempool zijn, nuttig bijv. voor een gateway</translation> </message> <message> <source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source> <translation>Je moet een masternodeprivkey opgeven in de configuratie. Raadpleeg de documentatie voor hulp.</translation> </message> <message> <source>(5520 could be used only on mainnet)</source> <translation>(5520 kan alleen op mainnet worden gebruikt)</translation> </message> <message> <source>(default: %s)</source> <translation>(standaard: %s)</translation> </message> <message> <source>(default: 1)</source> <translation>(standaard: 1)</translation> </message> <message> <source>(must be 5520 for mainnet)</source> <translation>(moet 5520 voor mainnet zijn)</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>Accepteer command line en JSON-RPC commando's</translation> </message> <message> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Accepteer verbindingen van buitenaf (standaard: 1 als geen -proxy of -connect)</translation> </message> <message> <source>Accept public REST requests (default: %u)</source> <translation>Accepteer publieke REST verzoeken (standaard: %u)</translation> </message> <message> <source>Acceptable ciphers (default: %s)</source> <translation>Acceptabele ciphers (standaard: %s)</translation> </message> <message> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Voeg een node toe om verbinding mee te maken en probeer de verbinding open te houden</translation> </message> <message> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>DNS lookups toestaan voor -addnode, -seednode en -connect</translation> </message> <message> 
<source>Already have that input.</source> <translation>Heeft die input al.</translation> </message> <message> <source>Always query for peer addresses via DNS lookup (default: %u)</source> <translation>Vraag altijd naar peer adressen via DNS lookup (standaard: %u)</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Poog om privé sleutels te herstellen van een corrupte wallet.dat</translation> </message> <message> <source>Automatically create Tor hidden service (default: %d)</source> <translation>Creëer automatisch de Tor verborgen service (standaard: %d)</translation> </message> <message> <source>Block creation options:</source> <translation>Block creatie opties:</translation> </message> <message> <source>Calculating missing accumulators...</source> <translation>Calculeren van ontbrekende accumulators...</translation> </message> <message> <source>Can't denominate: no compatible inputs left.</source> <translation>Kan niet denomineren: er zijn geen compatibele inputs over.</translation> </message> <message> <source>Can't find random Masternode.</source> <translation>Kan geen willekeurige Masternode vinden.</translation> </message> <message> <source>Can't mix while sync in progress.</source> <translation>Kan niet mixen terwijl synchronisatie wordt uitgevoerd.</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Kan de portemonnee niet downgraden</translation> </message> <message> <source>Cannot resolve -bind address: '%s'</source> <translation>Kan -bind adres niet oplossen: '%s'</translation> </message> <message> <source>Cannot resolve -externalip address: '%s'</source> <translation>Kan -externalip adres niet oplossen: '%s'</translation> </message> <message> <source>Cannot resolve -whitebind address: '%s'</source> <translation>Kan -whitebind adres niet oplossen: '%s'</translation> </message> <message> <source>Cannot write default address</source> <translation>Kan 
standaard adres niet schrijven</translation> </message> <message> <source>Collateral not valid.</source> <translation>Terugbetaling niet geldig.</translation> </message> <message> <source>Connect only to the specified node(s)</source> <translation>Verbind alleen met de opgegeven node(s)</translation> </message> <message> <source>Connect through SOCKS5 proxy</source> <translation>Verbind via SOCKS5 proxy</translation> </message> <message> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Verbind met een node om peer adressen te verkrijgen en verbreek verbinding</translation> </message> <message> <source>Connection options:</source> <translation>Connectie opties:</translation> </message> <message> <source>Copyright (C) 2009-%i The Bitcoin Core Developers</source> <translation>Copyright (C) 2009-%i The Bitcoin Kernontwikkelaars</translation> </message> <message> <source>Copyright (C) 2014-%i The Dash Core Developers</source> <translation>Copyright (C) 2014-%i The Dash Kernontwikkelaars</translation> </message> <message> <source>Copyright (C) 2015-%i The Ccbc Core Developers</source> <translation>Copyright (C) 2015-%i The Ccbc Kernontwikkelaars</translation> </message> <message> <source>Corrupted block database detected</source> <translation>Corrupte block database gedetecteerd</translation> </message> <message> <source>Could not parse -rpcbind value %s as network address</source> <translation>Kan -rpcbind value %s niet parsen als een netwerk adres</translation> </message> <message> <source>Could not parse masternode.conf</source> <translation>Kan masternode.conf niet parsen</translation> </message> <message> <source>Debugging/Testing options:</source> <translation>Debugging/Test opties:</translation> </message> <message> <source>Disable OS notifications for incoming transactions (default: %u)</source> <translation>Schakel OS notificaties uit voor inkomende transacties (standaard: %u)</translation> </message> <message> 
<source>Disable safemode, override a real safe mode event (default: %u)</source> <translation>Schakel safe mode uit, override een echte safe mode gebeurtenis (standaard: %u)</translation> </message> <message> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Ontdek eigen IP adres (standaard: 1 bij luisteren en niet -externalip)</translation> </message> <message> <source>Do not load the wallet and disable wallet RPC calls</source> <translation>Laad de portemonnee niet in en schakel portemonnee RPC oproepen uit</translation> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Wil je de blok database nu herbouwen?</translation> </message> <message> <source>Done loading</source> <translation>Klaar met laden</translation> </message> <message> <source>Enable automatic Zerocoin minting (0-1, default: %u)</source> <translation>Zet automatische Zerocoin minting aan (0-1, standaard: %u)</translation> </message> <message> <source>Enable the client to act as a masternode (0-1, default: %u)</source> <translation>Schakel de client in als masternode (0-1, standaard: %u)</translation> </message> <message> <source>Entries are full.</source> <translation>De entries zijn vol.</translation> </message> <message> <source>Error connecting to Masternode.</source> <translation>Fout bij verbinden met Masternode.</translation> </message> <message> <source>Error initializing block database</source> <translation>Fout bij het initialiseren van blok database</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation>Fout bij het initialiseren van de wallet database omgeving %s!</translation> </message> <message> <source>Error loading block database</source> <translation>Error tijdens het laden van de block database</translation> </message> <message> <source>Error loading wallet.dat</source> <translation>Error tijdens het laden van 
wallet.dat</translation> </message> <message> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error tijdens het laden van wallet.dat: Portemonnee corrupt</translation> </message> <message> <source>Error loading wallet.dat: Wallet requires newer version of Ccbc Core</source> <translation>Fout bij het laden van wallet.dat: Portemonnee vereist een nieuwere versie van Ccbc Core</translation> </message> <message> <source>Error opening block database</source> <translation>Error tijdens het openen van de block database</translation> </message> <message> <source>Error reading from database, shutting down.</source> <translation>Error tijdens het lezen van de database, aan het afsluiten.</translation> </message> <message> <source>Error recovering public key.</source> <translation>Fout bij het herstellen van de publieke sleutel.</translation> </message> <message> <source>Error</source> <translation>Error</translation> </message> <message> <source>Error: A fatal internal error occured, see debug.log for details</source> <translation>Fout: Er is een fatale interne fout opgetreden, zie debug.log voor details</translation> </message> <message> <source>Error: Can't select current denominated inputs</source> <translation>Fout: Kan de huidige gedenomineerde inputs niet selecteren</translation> </message> <message> <source>Error: Disk space is low!</source> <translation>Error: Schijfruimte is laag!</translation> </message> <message> <source>Error: Unsupported argument -tor found, use -onion.</source> <translation>Fout: Niet ondersteunde argument -tor gevonden, gebruik -onion.</translation> </message> <message> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Fout: Portemonnee vergrendeld, niet in staat om transactie te creëren!</translation> </message> <message> <source>Error: You already have pending entries in the Obfuscation pool</source> <translation>Fout: U heeft al entries in afwachting in de verduistering 
pool</translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Niet gelukt om te luisteren op een poort. Gebruik -listen=0 als je dit wilt.</translation> </message> <message> <source>Failed to read block</source> <translation>Mislukt om block te lezen</translation> </message> <message> <source>Fee (in CCBC/kB) to add to transactions you send (default: %s)</source> <translation>Fee (in CCBC/kB) om toe te voegen aan transacties die je verzendt (standaard: %s)</translation> </message> <message> <source>Finalizing transaction.</source> <translation>Transactie aan het voltooien.</translation> </message> <message> <source>Force safe mode (default: %u)</source> <translation>Forceer safe mode (standaard: %u)</translation> </message> <message> <source>Found enough users, signing ( waiting %s )</source> <translation>Genoeg gebruikers gevonden, aan het ondertekenen (%s aan het wachten)</translation> </message> <message> <source>Found enough users, signing ...</source> <translation>Genoeg gebruikers gevonden, aan het ondertekenen ...</translation> </message> <message> <source>Generate coins (default: %u)</source> <translation>Genereer munten (standaard: %u)</translation> </message> <message> <source>How many blocks to check at startup (default: %u, 0 = all)</source> <translation>Hoeveel blokken bij het opstarten controleren (standaard: %u, 0 = alles)</translation> </message> <message> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation>Als &lt;category&gt; niet is opgegeven, output alle debugging informatie.</translation> </message> <message> <source>Importing...</source> <translation>Importeren...</translation> </message> <message> <source>Imports blocks from external blk000??.dat file</source> <translation>Importeert blokken uit extern blk000??.dat bestand</translation> </message> <message> <source>Include IP addresses in debug output (default: %u)</source> 
<translation>Inclusief IP adressen in debug output (standaard: %u)</translation> </message> <message> <source>Incompatible mode.</source> <translation>Modus is niet compatibel.</translation> </message> <message> <source>Incompatible version.</source> <translation>Versie is niet compatibel.</translation> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Het genesis block kan niet worden gevonden of is incorrect. Klopt datadir voor het netwerk?</translation> </message> <message> <source>Information</source> <translation>Informatie</translation> </message> <message> <source>Initialization sanity check failed. Ccbc Core is shutting down.</source> <translation>Initialisatie saniteitscontrole mislukt. Ccbc Core wordt afgesloten.</translation> </message> <message> <source>Input is not valid.</source> <translation>Ongeldige invoer.</translation> </message> <message> <source>Insufficient funds</source> <translation>Onvoldoende saldo.</translation> </message> <message> <source>Insufficient funds.</source> <translation>Onvoldoende saldo.</translation> </message> <message> <source>Invalid -onion address or hostname: '%s'</source> <translation>Ongeldig -onion adres of hostnaam: '%s'</translation> </message> <message> <source>Invalid -proxy address or hostname: '%s'</source> <translation>Ongeldig -proxy adres of hostnaam: '%s'</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s'</source> <translation>Ongeldige hoeveelheid voor -maxtxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: '%s'</source> <translation>Ongeldige hoeveelheid voor -minrelaytxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -mintxfee=&lt;amount&gt;: '%s'</source> <translation>Ongeldige hoeveelheid voor -mintxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for 
-paytxfee=&lt;amount&gt;: '%s' (must be at least %s)</source> <translation>Ongeldige hoeveelheid voor -paytxfee=&lt;amount&gt;: '%s' (moet tenminste %s zijn)</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s'</source> <translation>Ongeldige hoeveelheid voor -paytxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Ongeldige hoeveelheid voor -reservebalance=&lt;amount&gt;</translation> </message> <message> <source>Invalid amount</source> <translation>Ongeldige hoeveelheid</translation> </message> <message> <source>Invalid masternodeprivkey. Please see documenation.</source> <translation>Ongeldige masternodeprivkey. Zie documentatie.</translation> </message> <message> <source>Invalid netmask specified in -whitelist: '%s'</source> <translation>Ongeldige netmask opgegeven in -whitelist: '%s'</translation> </message> <message> <source>Invalid port detected in masternode.conf</source> <translation>Ongeldige poort gedetecteerd in masternode.conf</translation> </message> <message> <source>Invalid private key.</source> <translation>Ongeldige privésleutel.</translation> </message> <message> <source>Invalid script detected.</source> <translation>Ongeldige script gedetecteerd.</translation> </message> <message> <source>This is a pre-release test build - use at your own risk - do not use for staking or merchant applications!</source> <translation>Dit is een pre-release test build - gebruik op eigen risico - niet gebruiken voor staking of handel applicaties!</translation> </message> <message> <source> mints deleted </source> <translation>mints verwijderd </translation> </message> <message> <source> mints updated, </source> <translation>mints bijgewerkt,</translation> </message> <message> <source> unconfirmed transactions removed </source> <translation>onbevestigde transacties verwijderd </translation> </message> <message> <source>Error: The 
transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Fout: De transactie is afgewezen! Dit kan gebeuren als sommige munten in je portemonnee al waren uitgegeven, bijvoorbeeld als je een kopie van wallet.dat gebruikt en munten in de kopie waren besteed maar hier niet als zodanig gemarkeerd.</translation> </message> <message> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Fout: Deze transactie vereist transactiekosten van ten minste %s vanwege de hoeveelheid, de complexiteit of het gebruik van recent ontvangen fondsen!</translation> </message> <message> <source>Error: Unsupported argument -checklevel found. Checklevel must be level 4.</source> <translation>Fout: Niet ondersteund argument -checklevel gevonden. Checklevel moet niveau 4 zijn.</translation> </message> <message> <source>Preferred Denomination for automatically minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 for no preference. default: %u)</source> <translation>Voorkeur denominatie voor automatisch minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 voor geen voorkeur. 
standaard: %u)</translation> </message> <message> <source>&lt;category&gt; can be:</source> <translation>&lt;category&gt; kan zijn:</translation> </message> <message> <source>Attempt to force blockchain corruption recovery</source> <translation>Poging om blockchain corruptie herstel te forceren</translation> </message> <message> <source>Display the stake modifier calculations in the debug.log file.</source> <translation>Toon de berekeningen van de stake modificator in het debug.log bestand.</translation> </message> <message> <source>Display verbose coin stake messages in the debug.log file.</source> <translation>Toon verbose munt stake berichten in het debug.log bestand.</translation> </message> <message> <source>Enable publish hash block in &lt;address&gt;</source> <translation>Activeer publicatie hash blok in &lt;address&gt;</translation> </message> <message> <source>Enable publish hash transaction (locked via SwiftTX) in &lt;address&gt;</source> <translation>Activeer publicatie hash transactie (vergrendeld via SwiftTX) in &lt;address&gt;</translation> </message> <message> <source>Enable publish hash transaction in &lt;address&gt;</source> <translation>Activeer publicatie hash transactie in &lt;address&gt;</translation> </message> <message> <source>Enable publish raw block in &lt;address&gt;</source> <translation>Activeer publicatie raw block in &lt;address&gt;</translation> </message> <message> <source>Enable publish raw transaction (locked via SwiftTX) in &lt;address&gt;</source> <translation>Activeer publicatie raw transactie (vergrendeld via SwiftTX) in &lt;address&gt;</translation> </message> <message> <source>Enable publish raw transaction in &lt;address&gt;</source> <translation>Activeer publicatie raw transactie in &lt;address&gt;</translation> </message> <message> <source>Enable staking functionality (0-1, default: %u)</source> <translation>Activeer staking functionaliteit (0-1, standaard: %u)</translation> </message> <message> <source>Keep at most 
&lt;n&gt; unconnectable transactions in memory (default: %u)</source> <translation>Houd maximaal &lt;n&gt; niet te verbinden transacties in het geheugen (standaard: %u)</translation> </message> <message> <source>Last Obfuscation was too recent.</source> <translation>Laatste verduistering was te recent.</translation> </message> <message> <source>Last successful Obfuscation action was too recent.</source> <translation>Laatste succesvolle verduistering actie was te recent.</translation> </message> <message> <source>Less than 3 mints added, unable to create spend</source> <translation>Minder dan 3 mints toegevoegd, niet in staat om uitgave te creëren</translation> </message> <message> <source>Limit size of signature cache to &lt;n&gt; entries (default: %u)</source> <translation>Limiet grootte van signature cache naar &lt;n&gt; invoer (standaard: %u)</translation> </message> <message> <source>Line: %d</source> <translation>Regel: %d</translation> </message> <message> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Luister naar JSON-RPC verbindingen op &lt;port&gt; (standaard: %u of testnet: %u)</translation> </message> <message> <source>Listen for connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Luister naar verbindingen op &lt;port&gt; (standaard: %u of testnet: %u)</translation> </message> <message> <source>Loading addresses...</source> <translation>Adressen laden...</translation> </message> <message> <source>Loading block index...</source> <translation>Block index laden...</translation> </message> <message> <source>Loading budget cache...</source> <translation>Budget cache laden...</translation> </message> <message> <source>Loading masternode cache...</source> <translation>Masternode cache laden...</translation> </message> <message> <source>Loading masternode payment cache...</source> <translation>Masternode betalingscache laden...</translation> </message> <message> <source>Loading 
sporks...</source> <translation>Sporks laden...</translation> </message> <message> <source>Loading wallet... (%3.2f %%)</source> <translation>Portemonnee laden... (%3.2f %%)</translation> </message> <message> <source>Loading wallet...</source> <translation>Portemonnee aan het laden...</translation> </message> <message> <source>Lock is already in place.</source> <translation>Vergrendeling is al uitgevoerd.</translation> </message> <message> <source>Lock masternodes from masternode configuration file (default: %u)</source> <translation>Masternodes vergrendelen van masternode configuratiebestand (standaard: %u)</translation> </message> <message> <source>Maintain at most &lt;n&gt; connections to peers (default: %u)</source> <translation>Bijhouden maximaal &lt;n&gt; connecties naar peers (standaard: %u)</translation> </message> <message> <source>Masternode options:</source> <translation>Masternode opties:</translation> </message> <message> <source>Masternode queue is full.</source> <translation>Masternode wachtrij zit vol.</translation> </message> <message> <source>Masternode:</source> <translation>Masternode:</translation> </message> <message> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Maximaal per connectie ontvangst buffer, &lt;n&gt;*1000 bytes (standaard: %u)</translation> </message> <message> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Maximaal per connectie verstuur buffer, &lt;n&gt;*1000 bytes (standaard: %u)</translation> </message> <message> <source>Missing input transaction information.</source> <translation>Ontbrekende invoer transactie informatie.</translation> </message> <message> <source>Mixing in progress...</source> <translation>Bezig met mixen...</translation> </message> <message> <source>Need to specify a port with -whitebind: '%s'</source> <translation>Moet een poort opgeven met -whitebind: '%s'</translation> </message> 
<message> <source>No Masternodes detected.</source> <translation>Geen Masternodes gedetecteerd.</translation> </message> <message> <source>No compatible Masternode found.</source> <translation>Geen compatibele Masternode gevonden.</translation> </message> <message> <source>No funds detected in need of denominating.</source> <translation>Geen fondsen gedetecteerd die denominatie nodig hebben.</translation> </message> <message> <source>No matching denominations found for mixing.</source> <translation>Geen passende denominaties gevonden voor mixing.</translation> </message> <message> <source>Node relay options:</source> <translation>Node relay opties:</translation> </message> <message> <source>Non-standard public key detected.</source> <translation>Niet standaard publieke sleutel gedetecteerd.</translation> </message> <message> <source>Not compatible with existing transactions.</source> <translation>Niet compatibel met bestaande transacties.</translation> </message> <message> <source>Not enough file descriptors available.</source> <translation>Niet genoeg bestandsbeschrijvingen beschikbaar.</translation> </message> <message> <source>Not in the Masternode list.</source> <translation>Niet in de Masternode lijst.</translation> </message> <message> <source>Number of automatic wallet backups (default: 10)</source> <translation>Aantal automatische portemonnee backups (standaard: 10)</translation> </message> <message> <source>Obfuscation is idle.</source> <translation>Verduistering is inactief.</translation> </message> <message> <source>Obfuscation request complete:</source> <translation>Verduistering verzoek compleet:</translation> </message> <message> <source>Obfuscation request incomplete:</source> <translation>Verduistering verzoek incompleet:</translation> </message> <message> <source>Only accept block chain matching built-in checkpoints (default: %u)</source> <translation>Accepteer alleen blockchain matching met ingebouwde controlepunten (standaard: %u)</translation> 
</message> <message> <source>Only connect to nodes in network &lt;net&gt; (ipv4, ipv6 or onion)</source> <translation>Verbind alleen met nodes in het netwerk &lt;net&gt; (ipv4, ipv6 of onion)</translation> </message> <message> <source>Options:</source> <translation>Opties:</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>Wachtwoord voor JSON-RPC connecties</translation> </message> <message> <source>Percentage of automatically minted Zerocoin (10-100, default: %u)</source> <translation>Percentage automatisch geminte Zerocoin (10-100, standaard: %u)</translation> </message> <message> <source>Prepend debug output with timestamp (default: %u)</source> <translation>Voeg debug output met timestamp toe (standaard: %u)</translation> </message> <message> <source>Print version and exit</source> <translation>Print versie en verlaat</translation> </message> <message> <source>RPC SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>RPC SSL opties: (zie de Bitcoin Wiki voor SSL setup instructies)</translation> </message> <message> <source>RPC server options:</source> <translation>RPC server opties:</translation> </message> <message> <source>RPC support for HTTP persistent connections (default: %d)</source> <translation>RPC ondersteuning voor HTTP aanhoudende verbindingen (standaard: %d)</translation> </message> <message> <source>Randomly drop 1 of every &lt;n&gt; network messages</source> <translation>Willekeurig laten vallen van 1 van elke &lt;n&gt;netwerk berichten</translation> </message> <message> <source>Randomly fuzz 1 of every &lt;n&gt; network messages</source> <translation>Willekeurig vervagen van 1 van elke &lt;n&gt; netwerk berichten</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Herstel blockchain index van huidige blk000??.dat bestanden</translation> </message> <message> <source>Receive and display P2P network 
alerts (default: %u)</source> <translation>Ontvang en laat P2P netwerkmeldingen zien (standaard: %u)</translation> </message> <message> <source>Relay and mine data carrier transactions (default: %u)</source> <translation>Relay en mine data carrier transacties (standaard: %u)</translation> </message> <message> <source>Relay non-P2SH multisig (default: %u)</source> <translation>Relay non P2SH multisig (default: %u)</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions</source> <translation>Rescan de blockchain voor ontbrekende portemonnee transacties</translation> </message> <message> <source>Rescanning...</source> <translation>Opnieuw scannen...</translation> </message> <message> <source>ResetMintZerocoin finished: </source> <translation>ResetMintZerocoin voltooid: </translation> </message> <message> <source>ResetSpentZerocoin finished: </source> <translation>ResetSpentZerocoin voltooid: </translation> </message> <message> <source>Run a thread to flush wallet periodically (default: %u)</source> <translation>Voer regelmatig een thread om de portemonnee te spoelen uit (standaard: %u)</translation> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Voer op de achtergrond uit als een daemon en accepteer commando's</translation> </message> <message> <source>Send transactions as zero-fee transactions if possible (default: %u)</source> <translation>Zend transacties als zero fee transacties indien mogelijk (standaard: %u)</translation> </message> <message> <source>Server certificate file (default: %s)</source> <translation>Server certificaat bestand (standaard: %s)</translation> </message> <message> <source>Server private key (default: %s)</source> <translation>Server privé sleutel (default: %s)</translation> </message> <message> <source>Session not complete!</source> <translation>Sessie niet voltooid!</translation> </message> <message> <source>Session timed out.</source> 
<translation>Sessie verlopen.</translation> </message> <message> <source>Set database cache size in megabytes (%d to %d, default: %d)</source> <translation>Stel de cache grootte van de database in megabytes in (%d tot %d, standaard: %d)</translation> </message> <message> <source>Set external address:port to get to this masternode (example: %s)</source> <translation>Extern adres instellen:poort om bij deze masternode te komen (voorbeeld: %s)</translation> </message> <message> <source>Set key pool size to &lt;n&gt; (default: %u)</source> <translation>Stel key pool grootte in op &lt;n&gt; (standaard: %u)</translation> </message> <message> <source>Set maximum block size in bytes (default: %d)</source> <translation>Stel maximale block grootte in bytes in (default: %d)</translation> </message> <message> <source>Set minimum block size in bytes (default: %u)</source> <translation>Stel minimale block grootte in bytes in (default: %u)</translation> </message> <message> <source>Set the masternode private key</source> <translation>Stel de masternode privé sleutel in</translation> </message> <message> <source>Set the number of threads to service RPC calls (default: %d)</source> <translation>Stel het aantal threads in om RPC oproepen te bedienen (standaard: %d)</translation> </message> <message> <source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source> <translation>Stelt de DB_PRIVATE vlag in de portemonnee db omgeving in (standaard: %u)</translation> </message> <message> <source>Show all debugging options (usage: --help -help-debug)</source> <translation>Toon alle debugging opties (gebruik: --help -help-debug)</translation> </message> <message> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Krimp debug.log bestand bij client startup (standaard: 1 wanneer geen -debug)</translation> </message> <message> <source>Signing failed.</source> <translation>Ondertekenen mislukt.</translation> </message> 
<message> <source>Signing timed out.</source> <translation>Ondertekening time out.</translation> </message> <message> <source>Signing transaction failed</source> <translation>Ondertekening transactie mislukt.</translation> </message> <message> <source>Specify configuration file (default: %s)</source> <translation>Specificeer configuratiebestand (standaard: %s)</translation> </message> <message> <source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source> <translation>Specificeer verbinding time-out in milliseconden (minimum: 1, standaard: %d)</translation> </message> <message> <source>Specify data directory</source> <translation>Specificeer data directory.</translation> </message> <message> <source>Specify masternode configuration file (default: %s)</source> <translation>Specificeer masternode configuratie bestand (default: %s)</translation> </message> <message> <source>Specify pid file (default: %s)</source> <translation>Specificeer pid bestand (default: %s)</translation> </message> <message> <source>Specify wallet file (within data directory)</source> <translation>Specificeer portemonnee bestand (in data directory)</translation> </message> <message> <source>Specify your own public address</source> <translation>Specificeer je eigen publieke addres</translation> </message> <message> <source>Spend unconfirmed change when sending transactions (default: %u)</source> <translation>Onbevestigd wisselgeld besteden bij het verzenden van transacties (standaard: %u)</translation> </message> <message> <source>Staking options:</source> <translation>Staking opties:</translation> </message> <message> <source>Stop running after importing blocks from disk (default: %u)</source> <translation>Stop na het importeren van blokken van schijf (standaard: %u)</translation> </message> <message> <source>Submitted following entries to masternode: %u / %d</source> <translation>Ingediende volgende vermeldingen in masternode: %u / %d</translation> </message> <message> 
<source>Submitted to masternode, waiting for more entries ( %u / %d ) %s</source> <translation>Ingediend naar masternode, wachten op meer inzendingen (%u / %d) %s</translation> </message> <message> <source>Submitted to masternode, waiting in queue %s</source> <translation>Ingediend naar masternode, wachten in de wachtrij %s</translation> </message> <message> <source>SwiftTX options:</source> <translation>SwiftTX opties:</translation> </message> <message> <source>Synchronization failed</source> <translation>Synchronisatie mislukt</translation> </message> <message> <source>Synchronization finished</source> <translation>Synchronisatie voltooid</translation> </message> <message> <source>Synchronization pending...</source> <translation>Synchronisatie in afwachting...</translation> </message> <message> <source>Synchronizing budgets...</source> <translation>Budgeten synchroniseren...</translation> </message> <message> <source>Synchronizing masternode winners...</source> <translation>Synchroniseren masternode winnaars...</translation> </message> <message> <source>Synchronizing masternodes...</source> <translation>Synchroniseren masternodes...</translation> </message> <message> <source>Synchronizing sporks...</source> <translation>Synchroniseren sporks...</translation> </message> <message> <source>This help message</source> <translation>Dit help bericht</translation> </message> <message> <source>This is experimental software.</source> <translation>Dit is experimentele software.</translation> </message> <message> <source>This is intended for regression testing tools and app development.</source> <translation>Dit is bedoeld voor regressie test tools en app ontwikkeling.</translation> </message> <message> <source>This is not a Masternode.</source> <translation>Dit is geen Masternode.</translation> </message> <message> <source>Threshold for disconnecting misbehaving peers (default: %u)</source> <translation>Drempel voor het verbreken van misdragende peers (standaard: 
%u)</translation> </message> <message> <source>Tor control port password (default: empty)</source> <translation>Tor controle poort wachtwoord (standaard: leeg)</translation> </message> <message> <source>Tor control port to use if onion listening enabled (default: %s)</source> <translation>Tor controle poort om te gebruiken als onion listening geactiveerd is (standaard: %s)</translation> </message> <message> <source>Transaction amount too small</source> <translation>Transactie bedrag te klein</translation> </message> <message> <source>Transaction amounts must be positive</source> <translation>Transactie bedragen moeten positief zijn</translation> </message> <message> <source>Transaction created successfully.</source> <translation>Transactie is succesvol gemaakt.</translation> </message> <message> <source>Transaction fees are too high.</source> <translation>Transactiekosten zijn te hoog.</translation> </message> <message> <source>Transaction not valid.</source> <translation>Transactie is niet geldig.</translation> </message> <message> <source>Transaction too large for fee policy</source> <translation>Transactie te groot voor kosten beleid</translation> </message> <message> <source>Transaction too large</source> <translation>Transactie te groot</translation> </message> <message> <source>Transmitting final transaction.</source> <translation>Verzending van de definitieve transactie.</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %s)</source> <translation>Niet mogelijk te binden aan %s op deze computer (bind stuurt fout %s terug)</translation> </message> <message> <source>Unable to sign spork message, wrong key?</source> <translation>Kan sporkbericht niet tekenen, verkeerde sleutel?</translation> </message> <message> <source>Unknown network specified in -onlynet: '%s'</source> <translation>Onbekend netwerk gespecificeerd in -onlynet: '%s'</translation> </message> <message> <source>Unknown state: id = %u</source> 
<translation>Onbekende staat: id = %u</translation> </message> <message> <source>Upgrade wallet to latest format</source> <translation>Upgrade portemonnee naar nieuwste formaat</translation> </message> <message> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Gebruik OpenSSL (https) voor JSON-RPC verbindingen</translation> </message> <message> <source>Use UPnP to map the listening port (default: %u)</source> <translation>Gebruik UPnP om de luisterpoort te mappen (standaard: %u)</translation> </message> <message> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Gebruik UPnP om de luisterpoort te mappen (standaard: 1 bij het luisteren)</translation> </message> <message> <source>Use the test network</source> <translation>Gebruik het test netwerk</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>Gebruikersnaam voor JSON-RPC verbindingen</translation> </message> <message> <source>Value more than Obfuscation pool maximum allows.</source> <translation>Waarde meer dan verduistering pool maximaal toestaat.</translation> </message> <message> <source>Verifying blocks...</source> <translation>Blokken verifiëren...</translation> </message> <message> <source>Verifying wallet...</source> <translation>Portemonnee verifiëren...</translation> </message> <message> <source>Wallet %s resides outside data directory %s</source> <translation>Portemonnee %s verblijft buiten de data directory %s</translation> </message> <message> <source>Wallet is locked.</source> <translation>Portemonnee is vergrendeld.</translation> </message> <message> <source>Wallet needed to be rewritten: restart Ccbc Core to complete</source> <translation>Wallet moest worden herschreven: start Ccbc Core opnieuw om te voltooien</translation> </message> <message> <source>Wallet options:</source> <translation>Portemonnee opties:</translation> </message> <message> <source>Wallet window title</source> 
<translation>Portemonnee venster titel</translation> </message> <message> <source>Warning</source> <translation>Waarschuwing</translation> </message> <message> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Waarschuwing: Deze versie is verouderd, upgrade vereist!</translation> </message> <message> <source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source> <translation>Waarschuwing: Niet- ondersteund argument -benchmark genegeerd, gebruik -debug=bench.</translation> </message> <message> <source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source> <translation>Waarschuwing: Niet ondersteunde argument -debugnet genegeerd, gebruik -debug=net.</translation> </message> <message> <source>Will retry...</source> <translation>Zal het opnieuw proberen...</translation> </message> <message> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation>Je moet de database herbouwen met -reindex om -txindex te wijzigen</translation> </message> <message> <source>Your entries added successfully.</source> <translation>Je gegevens zijn succesvol toegevoegd.</translation> </message> <message> <source>Your transaction was accepted into the pool!</source> <translation>Je transactie is geaccepteerd in de pool!</translation> </message> <message> <source>Zapping all transactions from wallet...</source> <translation>Verwijderen van alle transacties uit portemonnee...</translation> </message> <message> <source>ZeroMQ notification options:</source> <translation>ZeroMQ notificatie opties:</translation> </message> <message> <source>Zerocoin options:</source> <translation>Zerocoin opties:</translation> </message> <message> <source>failed to validate zerocoin</source> <translation>mislukt om zerocoin te valideren</translation> </message> <message> <source>on startup</source> <translation>tijdens het opstarten</translation> </message> <message> <source>wallet.dat corrupt, salvage 
failed</source> <translation>wallet.dat corrupt, redding mislukt</translation> </message> </context> </TS>
<translation>Gebruikersinterface &amp;taal:</translation>
__init__.py
import pytz
import logging
from datetime import datetime
from bs4 import BeautifulSoup
from habari.apps.crawl.models import Article
from habari.apps.crawl.crawlers import AbstractBaseCrawler
from habari.apps.utils.error_utils import error_to_string, http_error_to_string

logger = logging.getLogger(__name__)


class DNCrawler(AbstractBaseCrawler):
    """Crawler for Daily Nation (nation.africa) articles.

    Discovers category pages from the site footer, collects new story
    links, scrapes each article's metadata and bulk-inserts the results
    as ``Article`` rows. Non-fatal errors are accumulated in
    ``self.errors`` (provided by ``AbstractBaseCrawler``).
    """

    def __init__(self):
        super().__init__('DN')
        self.url = self.news_source.url
        self.categories = self.get_category_links()

    def oped_articles(self, url):
        """Return True if ``url`` belongs to an op-ed/photos section."""
        links = ('https://nation.africa/kenya/blogs-opinion/',
                 'https://nation.africa/kenya/photos/',
                 )
        return url.startswith(links)

    def links_to_avoid(self, url):
        """Return False for URLs that must never be crawled (cartoons)."""
        links = ('https://nation.africa/kenya/blogs-opinion/cartoons/',
                 )
        return not url.startswith(links)

    def get_category_links(self):
        """Collect absolute links to every category in the site footer.

        The home page itself is always included. Request failures are
        logged and appended to ``self.errors``; the partial list built so
        far is still returned.
        """
        logger.info('Getting links to all categories and sub-categories')
        categories = [self.url, ]
        try:
            get_categories = self.requests(self.url)
        except Exception as e:
            logger.exception('Error: {0} while getting categories from {1}'.format(e, self.url))
            self.errors.append(error_to_string(e))
        else:
            if get_categories.status_code == 200:
                soup = BeautifulSoup(get_categories.content, 'html.parser')
                main_categories = soup.select('footer ul.categories-nav_categories a')
                for cat in main_categories:
                    if cat.get('href') is not None:
                        link = self.make_relative_links_absolute(cat.get('href'))
                        categories.append(link)
        return categories

    def get_top_stories(self):
        """Return the set of new (not yet stored) story URLs across all categories."""
        logger.info('Getting the latest stories')
        story_links = []
        for category in self.categories:
            try:
                top_stories = self.requests(category)
                if top_stories.status_code == 200:
                    soup = BeautifulSoup(top_stories.content, 'html.parser')
                    stories = soup.select('a.teaser-image-large') + soup.select('a.article-collection-teaser')
                    for story in stories:
                        story = self.make_relative_links_absolute(story.get('href').strip())
                        # Keep only unseen, on-domain links that are not excluded.
                        if not Article.objects.filter(article_url=story).exists() and \
                                self.check_for_top_level_domain(story) and \
                                self.links_to_avoid(story):
                            story_links.append(story)
            except Exception as e:
                logger.exception(
                    '{0} error while getting top stories for {1}'.format(e, category))
                self.errors.append(error_to_string(e))
        return set(story_links)

    def get_story_details(self, link):
        """Scrape one article page and return its metadata.

        Returns a dict with keys ``article_url``, ``article_title``,
        ``publication_date`` (tz-aware, Africa/Nairobi), ``author``,
        ``summary`` and ``image_url``; returns None when the page does
        not respond with HTTP 200. Each field is probed against the
        primary markup first, falling back to OpenGraph meta tags.
        """
        story = self.requests(link)
        if story.status_code == 200:
            soup = BeautifulSoup(story.content, 'html.parser')
            # Title: the layout uses either a medium or a large heading.
            try:
                title = soup.select_one('h1.title-medium').get_text().strip()
            except AttributeError:
                title = soup.select_one('h1.title-large').get_text().strip()
            # Publication date: prefer the visible <time>, else og meta tag.
            try:
                publication_date = soup.select_one('time.date').get('datetime')
            except AttributeError:
                publication_date = soup.find("meta", property="og:article:published_time").get('content').strip()
            date = pytz.timezone("Africa/Nairobi").localize(
                datetime.strptime(publication_date, '%Y-%m-%dT%H:%M:%SZ'), is_dst=None)
            author_list = soup.select('.article-authors_texts .article-authors_authors')
            authors = self.sanitize_author_iterable(author_list)
            try:
                summary = soup.select_one('.article-content_summary .text-block').get_text().strip()
            except AttributeError:
                summary = soup.find("meta", property="og:description").get('content').strip()
            # Image: picture tag, then lazy iframe, then iframe, then og:image.
            try:
                image_url = self.make_relative_links_absolute(
                    soup.select_one('figure.article-picture img').get('data-src'))
            except AttributeError:
                try:
                    image_url = soup.select_one('figure iframe.lazy-iframe_iframe').get('data-src')
                except AttributeError:
                    try:
                        image_url = soup.select_one('figure iframe').get('src')
                    except AttributeError:
                        image_url = soup.find("meta", property="og:image").get('content').strip()
            return {'article_url': link,
                    'article_title': title,
                    'publication_date': date,
                    'author': authors,
                    'summary': summary,
                    'image_url': image_url}

    def get_oped_article_details(self, url):
        """Scrape an op-ed article page.

        Op-ed pages use the same markup as regular stories, so this
        delegates to ``get_story_details`` instead of duplicating the
        scraping logic (the two bodies were previously identical).
        """
        return self.get_story_details(url)

    def update_article_details(self, article):
        """Dispatch ``article`` to the op-ed or regular story scraper."""
        if self.oped_articles(article):
            return self.get_oped_article_details(article)
        else:
            return self.get_story_details(article)

    def update_top_stories(self):
        """Scrape every new story and bulk-insert the results.

        Per-article scraping errors are logged and collected without
        aborting the run; the crawl record's article count is updated on
        a successful bulk insert.
        """
        articles = self.get_top_stories()
        article_info = []
        for article in articles:
            try:
                logger.info('Updating article details for {}'.format(article))
                story = self.update_article_details(article)
                article_info.append(Article(title=story['article_title'],
                                            article_url=story['article_url'],
                                            article_image_url=story['image_url'],
                                            author=story['author'],
                                            publication_date=story['publication_date'],
                                            summary=story['summary'],
                                            news_source=self.news_source
                                            ))
            except Exception as e:
                logger.exception('Crawling Error: {0} while getting data from: {1}'.format(e, article))
                self.errors.append(error_to_string(e))
        try:
            Article.objects.bulk_create(article_info)
            logger.info('')
            logger.info('Succesfully updated Daily Nation Latest Articles.{} new articles added'.format(
                len(article_info)))
            self.crawl.total_articles = len(article_info)
            self.crawl.save()
        except Exception as e:
            logger.exception('Error!!!{}'.format(e))
            self.errors.append(error_to_string(e))
persistent_workspace.py
#------------------------------------------------------------------------------ # Copyright (c) 2013, Nucleic Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. #------------------------------------------------------------------------------ from __future__ import print_function import pickle from atom.api import Str from enaml.widgets.api import Container from enaml.workbench.ui.api import Workspace import enaml with enaml.imports(): from persistent_view import PersistentManifest, create_new_area print('Imported Persistent Workspace!') #: Storage for the pickled dock area. This would be saved #: to some persistent storage media in a real application. PICKLED_DOCK_AREA = None class PersistentWorkspace(Workspace): """ A custom Workspace class for the crash course example. """ #: Storage for the plugin manifest's id. _manifest_id = Str() def start(self): """ Start the workspace instance. This method will create the container content and register the provided plugin with the workbench. """ self.content = Container(padding=0) self.load_area() manifest = PersistentManifest() self._manifest_id = manifest.id self.workbench.register(manifest) def
(self): """ Stop the workspace instance. This method will unregister the workspace's plugin that was registered on start. """ self.save_area() self.workbench.unregister(self._manifest_id) def save_area(self): """ Save the dock area for the workspace. """ global PICKLED_DOCK_AREA area = self.content.find('the_dock_area') PICKLED_DOCK_AREA = pickle.dumps(area, -1) def load_area(self): """ Load the dock area into the workspace content. """ if PICKLED_DOCK_AREA is not None: area = pickle.loads(PICKLED_DOCK_AREA) else: area = create_new_area() area.set_parent(self.content)
stop
graph.go
package image import (
type Graph interface { Get(id string) (*Image, error) ImageRoot(id string) string Driver() graphdriver.Driver }
"example.com/m/v2/daemon/graphdriver" )
max.rs
use crate::noise_fns::NoiseFn;

/// Noise function that outputs the larger of the two output values from two source
/// functions.
pub struct
<A, B>(A, B);

impl<A, B> Max<A, B> {
    /// Builds a `Max` combiner from the two source functions.
    pub fn new(source1: A, source2: B) -> Self {
        Self(source1, source2)
    }
}

impl<T, A, B> NoiseFn<T> for Max<A, B>
where
    T: Copy,
    A: NoiseFn<T>,
    B: NoiseFn<T>,
{
    // Returns the maximum of the two source outputs at `point`.
    fn get(&self, point: T) -> f64 {
        (self.0.get(point)).max(self.1.get(point))
    }
}
Max
add.go
package linkedlist func AddReversed(l1, l2 *Node) *Node { if l1 == nil { return l2 } if l2 == nil { return l1
overflow := 0 res := l1 var prev *Node for l1 != nil && l2 != nil { l1.Value += l2.Value + overflow if l1.Value >= 10 { overflow = 1 l1.Value = l1.Value % 10 } else { overflow = 0 } prev = l1 l1 = l1.Next l2 = l2.Next } tail := l1 if l2 != nil { tail = l2 } if tail != nil { // append tail tail.Value += overflow prev.Next = tail } else if overflow > 0 { // handle overflow at the end prev.Next = &Node{ Value: overflow, } } else { // trim links at the end as they could be old links from original numbers prev.Next = nil } return res } func Add(l1, l2 *Node) *Node { if l1 == nil { return l2 } if l2 == nil { return l1 } len1 := length(l1) len2 := length(l2) if len1 < len2 { l1 = padd(l1, len2-len1) } else if len2 < len1 { l2 = padd(l2, len1-len2) } res, ovf := addEven(l1, l2) if ovf > 0 { n := &Node { Value: ovf, Next: res, } res = n } return res } func addEven(l1, l2 *Node) (*Node, int) { if l1 == nil { return nil, 0 } res, ovf := addEven(l1.Next, l2.Next) n := &Node { Value: l1.Value + l2.Value + ovf, Next: res, } if n.Value < 10 { return n, 0 } n.Value = n.Value % 10 return n, 1 } func length(l *Node) int { counter := 0 for p := l; p != nil; p = p.Next { counter += 1 } return counter } func padd(l *Node, k int) *Node { for i := 0; i < k; i += 1 { n := &Node { Value: 0, Next: l, } l = n } return l }
}
schema.py
from datetime import datetime from typing import List, Dict, Optional from pydantic import BaseModel, validator, root_validator class ItemModel(BaseModel): cve: Dict configurations: Optional[Dict]
class ResultModel(BaseModel): CVE_data_timestamp: datetime CVE_data_type: str CVE_Items: List[ItemModel] @validator('CVE_data_type') def fixed_type(cls, v): assert v == 'CVE', 'Must be of type CVE' return v class ResponseModel(BaseModel): resultsPerPage: int startIndex: int totalResults: int result: ResultModel
impact: Optional[Dict] publishedDate: datetime lastModifiedDate: datetime
calls.d.ts
import { Call } from '../hooks/useCall'; import { Falsy } from '../model/types'; import { RawCall, RawCallResult } from '../providers'; export declare function warnOnInvalidCall(call: Call | Falsy): void; export declare function encodeCallData(call: Call | Falsy): RawCall | Falsy; export declare function getUniqueCalls(requests: RawCall[]): RawCall[]; export declare class
{ readonly message: string; constructor(message: string); } export declare type CallResult = { value: any[]; error: undefined; } | { value: undefined; error: CallError; } | undefined; export declare function decodeCallResult(call: Call | Falsy, result: RawCallResult): CallResult; //# sourceMappingURL=calls.d.ts.map
CallError
ingest_financials.py
#!/usr/bin/python3 """ Responsible for ingesting data related to the business performance over time. Data is placed into the asx_company_financial_metric collection, ready for the core viewer app to use. Stocks whose financial details have been retrieved in the past month are skipped. """ import pymongo import argparse import yfinance as yf import time from utils import read_config import numpy as np import pandas as pd from datetime import datetime, timedelta from bson.objectid import ObjectId def melt_dataframes(dfs: tuple) -> pd.DataFrame: result = None for df in filter(lambda df: df is not None and len(df) > 0, dfs): df["metric"] = df.index melted = pd.melt(df, id_vars=("metric"), var_name="date") melted = melted.dropna(axis=0, how="any") if len(melted) == 0: continue # print(melted) # print(melted.shape) if result is None: result = melted else: result = result.append(melted) if result is not None and "date" in result.columns: # print(result) result["date"] = pd.to_datetime( result["date"], infer_datetime_format=True ) # format="%Y-%m-%d") # print(result) return result def
(): available_stocks = set(db.asx_company_details.distinct("asx_code")) print(f"Found {len(available_stocks)} available stocks.") gen_time = datetime.today() - timedelta(days=30) month_ago = ObjectId.from_datetime(gen_time) recently_updated_stocks = set( [ rec["asx_code"] for rec in db.asx_company_financial_metrics.find( {"_id": {"$gte": month_ago}} ) ] ) ret = available_stocks.difference(recently_updated_stocks) print(f"Found {len(ret)} desired stocks to process.") return ret def update_all_metrics(df: pd.DataFrame, asx_code: str) -> int: """ Add (or update) all financial metrics (ie. rows) for the specified asx_code in the specified dataframe :rtype: the number of records updated/created is returned """ print(f"Updating {len(df)} financial metrics for {asx_code}") n = 0 for t in df.itertuples(): d = { "metric": t.metric, "date": t.date, "value": t.value, "asx_code": t.asx_code, } assert t.asx_code == asx_code result = db.asx_company_financial_metrics.update_one( {"asx_code": asx_code, "date": t.date, "metric": t.metric}, {"$set": d}, upsert=True, ) assert result is not None assert isinstance(result, pymongo.results.UpdateResult) assert result.matched_count == 1 or result.upserted_id is not None n += 1 return n def fetch_metrics(asx_code: str) -> pd.DataFrame: """ Using the excellent yfinance, we fetch all possible metrics of business performance for the specified stock code. 
Returns a dataframe (possibly empty or none) representing each metric and its datapoints as separate rows """ assert len(asx_code) >= 3 ticker = yf.Ticker(asx_code + ".AX") cashflow_df = ticker.cashflow financial_df = ticker.financials earnings_df = ticker.earnings if set(earnings_df.columns) == set(["Earnings", "Revenue"]): earnings_df.index = earnings_df.index.map( str ) # convert years to str (maybe int) earnings_df = earnings_df.transpose() # print(earnings_df) balance_sheet_df = ticker.balance_sheet melted_df = melt_dataframes( (cashflow_df, financial_df, earnings_df, balance_sheet_df) ) return melted_df def make_asx_prices_dict(new_quote: tuple, asx_code: str) -> dict: #print(new_quote) d = { "asx_code": asx_code, "fetch_date": new_quote.Index, "volume": new_quote.Volume, "last_price": new_quote.Close, "day_low_price": new_quote.Low, "day_high_price": new_quote.High, "open_price": new_quote.Open, "error_code": "", "error_descr": "", # we dont set nan fields so that existing values (if any) are used ie. 
merge with existing data # "annual_dividend_yield": np.nan, # no available data from yf.Ticker.history() although may be available elsewhere, but for now set to missing # "annual_daily_volume": np.nan, # "bid_price": np.nan, "change_price": new_quote.change_price, "change_in_percent": new_quote.change_in_percent, } return d def fill_stock_quote_gaps(db, stock_to_fetch: str, force=False) -> int: assert db is not None assert len(stock_to_fetch) >= 3 ticker = yf.Ticker(stock_to_fetch + ".AX") df = ticker.history(period="max") df.index = [d.strftime("%Y-%m-%d") for d in df.index] # print(df) available_dates = set(df.index) available_quotes = list(db.asx_prices.find({"asx_code": stock_to_fetch})) quoted_dates = set( [q["fetch_date"] for q in available_quotes if not np.isnan(q["last_price"])] ) assert set(df.columns) == set( ["Open", "High", "Low", "Close", "Volume", "Dividends", "Stock Splits"] ) dates_to_fill = ( available_dates.difference(quoted_dates) if not force else available_dates ) print( "Got {} existing daily quotes for {}, found {} yfinance daily quotes, gap filling for {} dates (force={})".format( len(available_quotes), stock_to_fetch, len(df), len(dates_to_fill), force ) ) if len(dates_to_fill) < 1: return 0 df["change_price"] = df["Close"].diff() df["change_in_percent"] = df["Close"].pct_change() * 100.0 gap_quotes_df = df.filter(dates_to_fill, axis=0) # print(df) n = 0 for new_quote in gap_quotes_df.itertuples(): d = make_asx_prices_dict(new_quote, stock_to_fetch) result = db.asx_prices.update_one( {"fetch_date": d["fetch_date"], "asx_code": d["asx_code"]}, {"$set": d}, upsert=True, ) assert result is not None # assert result.modified_count == 1 or result.upserted_id is not None n += 1 assert n == len(gap_quotes_df) return n if __name__ == "__main__": args = argparse.ArgumentParser( description="Update financial performance metrics for ASX stocks using yfinance" ) args.add_argument( "--config", help="Configuration file to use [config.json]", type=str, 
default="config.json", ) args.add_argument( "--fill-gaps", help="Fill dates with no existing quotes for each stock (use --debug for a particular stock)", action="store_true", ) args.add_argument("--fail-fast", help="Stop on first error", action="store_true") args.add_argument( "--delay", help="Delay between stocks in seconds [30]", type=int, default=30 ) args.add_argument("--force", help="Overwrite existing data (if any)", action="store_true") args.add_argument( "--debug", help="Try to fetch specified stock (for debugging)", type=str, required=False, default=None, ) a = args.parse_args() config, password = read_config(a.config) m = config.get("mongo") mongo = pymongo.MongoClient( m.get("host"), m.get("port"), username=m.get("user"), password=password ) db = mongo[m.get("db")] stock_codes = desired_stocks() if not a.debug else set([a.debug]) print(f"Updating financial metrics for {len(stock_codes)} stocks") for asx_code in sorted(stock_codes): print(f"Processing stock {asx_code}") try: melted_df = fetch_metrics(asx_code) if melted_df is None or len(melted_df) < 1: raise ValueError(f"No data available for {asx_code}... skipping") melted_df["asx_code"] = asx_code ret = update_all_metrics(melted_df, asx_code) assert ret == len(melted_df) if a.fill_gaps: fill_stock_quote_gaps(db, asx_code, force=a.force) # FALLTHRU... time.sleep(a.delay) except Exception as e: print(f"WARNING: unable to download financials for {asx_code}") print(str(e)) if a.fail_fast: raise e exit(0)
desired_stocks
error.rs
use crate::sys; /// Error codes used in various Godot APIs. #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(u32)] pub enum GodotError { Failed = sys::godot_error_GODOT_FAILED as u32, Unavailable = sys::godot_error_GODOT_ERR_UNAVAILABLE as u32, Unconfigured = sys::godot_error_GODOT_ERR_UNCONFIGURED as u32, Unothorized = sys::godot_error_GODOT_ERR_UNAUTHORIZED as u32, PrameterRange = sys::godot_error_GODOT_ERR_PARAMETER_RANGE_ERROR as u32, OutOfMemory = sys::godot_error_GODOT_ERR_OUT_OF_MEMORY as u32, FileNotFound = sys::godot_error_GODOT_ERR_FILE_NOT_FOUND as u32, FileBadDrive = sys::godot_error_GODOT_ERR_FILE_BAD_DRIVE as u32, FileBadPath = sys::godot_error_GODOT_ERR_FILE_BAD_PATH as u32, FileNoPermission = sys::godot_error_GODOT_ERR_FILE_NO_PERMISSION as u32, FileAlreadyInUse = sys::godot_error_GODOT_ERR_FILE_ALREADY_IN_USE as u32, FileCantOpen = sys::godot_error_GODOT_ERR_FILE_CANT_OPEN as u32, FileCantWrite = sys::godot_error_GODOT_ERR_FILE_CANT_WRITE as u32, FileCantRead = sys::godot_error_GODOT_ERR_FILE_CANT_READ as u32, FileUnrecognized = sys::godot_error_GODOT_ERR_FILE_UNRECOGNIZED as u32, FileCorrupt = sys::godot_error_GODOT_ERR_FILE_CORRUPT as u32, FileMissingDependency = sys::godot_error_GODOT_ERR_FILE_MISSING_DEPENDENCIES as u32, FileEof = sys::godot_error_GODOT_ERR_FILE_EOF as u32, CantOpen = sys::godot_error_GODOT_ERR_CANT_OPEN as u32, CantCreate = sys::godot_error_GODOT_ERR_CANT_CREATE as u32, QueryFailed = sys::godot_error_GODOT_ERR_QUERY_FAILED as u32, AlreadyInUse = sys::godot_error_GODOT_ERR_ALREADY_IN_USE as u32, Locked = sys::godot_error_GODOT_ERR_LOCKED as u32, TimeOut = sys::godot_error_GODOT_ERR_TIMEOUT as u32, CantConnect = sys::godot_error_GODOT_ERR_CANT_CONNECT as u32, CantResolve = sys::godot_error_GODOT_ERR_CANT_RESOLVE as u32, ConnectionError = sys::godot_error_GODOT_ERR_CONNECTION_ERROR as u32, CantAcquireResource = sys::godot_error_GODOT_ERR_CANT_ACQUIRE_RESOURCE as u32, CantFork = sys::godot_error_GODOT_ERR_CANT_FORK as u32, 
InvalidData = sys::godot_error_GODOT_ERR_INVALID_DATA as u32, InvalidParameter = sys::godot_error_GODOT_ERR_INVALID_PARAMETER as u32, AlreadyExists = sys::godot_error_GODOT_ERR_ALREADY_EXISTS as u32, DoesNotExist = sys::godot_error_GODOT_ERR_DOES_NOT_EXIST as u32, DatabaseCantRead = sys::godot_error_GODOT_ERR_DATABASE_CANT_READ as u32, DatabaseCantWrite = sys::godot_error_GODOT_ERR_DATABASE_CANT_WRITE as u32, CompilationFailed = sys::godot_error_GODOT_ERR_COMPILATION_FAILED as u32, MethodNotFound = sys::godot_error_GODOT_ERR_METHOD_NOT_FOUND as u32,
DuplicateSymbol = sys::godot_error_GODOT_ERR_DUPLICATE_SYMBOL as u32, ParseError = sys::godot_error_GODOT_ERR_PARSE_ERROR as u32, Busy = sys::godot_error_GODOT_ERR_BUSY as u32, Skip = sys::godot_error_GODOT_ERR_SKIP as u32, Help = sys::godot_error_GODOT_ERR_HELP as u32, Bug = sys::godot_error_GODOT_ERR_BUG as u32, PrinterOnFire = sys::godot_error_GODOT_ERR_PRINTER_ON_FIRE as u32, } impl GodotError { /// Creates a `Result<(), GodotError>` from a raw error code. /// /// This is intended to be an internal API. /// /// # Safety /// /// `err` should be a valid value for `GodotError`. #[inline] #[doc(hidden)] pub unsafe fn result_from_sys(err: sys::godot_error) -> Result<(), Self> { if err == sys::godot_error_GODOT_OK { return Ok(()); } Err(std::mem::transmute(err as u32)) } }
LinkFailed = sys::godot_error_GODOT_ERR_LINK_FAILED as u32, ScriptFailed = sys::godot_error_GODOT_ERR_SCRIPT_FAILED as u32, CyclicLink = sys::godot_error_GODOT_ERR_CYCLIC_LINK as u32, InvalidDeclaration = sys::godot_error_GODOT_ERR_INVALID_DECLARATION as u32,
strategic.py
import math


class Player:
    """Holds a hand of cards (values 1-4) and produces a guess for how many
    cards of our most common value exist across all cards in play."""

    def __init__(self):
        # Card counts are supplied later via initcards().
        pass
        # self.most_common = lambda : self.numbers.index(max(self.numbers)) + 1

    def initcards(self, num1, num2, num3, num4, num_all):
        """Record how many cards of each value (1..4) we hold and the total
        number of cards in play."""
        self.numbers = [num1, num2, num3, num4]
        self.num_all = num_all
        # 1-based card value we hold the most of.
        self.common = self.numbers.index(max(self.numbers)) + 1

    def guess(self):
        """Return {most_common_value: estimated_total_count}.

        Estimates the opponents' share as num_all / 4 rounded to the nearest
        integer (ties round up to the ceiling) and adds the count we hold.
        """
        prob = self.num_all / 4
        ceil = math.ceil(prob)
        floor = math.floor(prob)
        # Pick the nearer integer; on an exact .5 tie the ceiling wins.
        prob = floor if abs(ceil - prob) > abs(floor - prob) else ceil
        return {self.common: prob + max(self.numbers)}

    def play(self):
        """Produce this round's guess."""
        guess_answer = self.guess()
        return guess_answer


def play_one_round(cart_list, num_all):
    """Play one round with the given hand; on failure re-prompt and retry."""
    player = Player()
    player.initcards(cart_list.count(1), cart_list.count(2),
                     cart_list.count(3), cart_list.count(4), num_all)
    try:
        player_guess = player.play()
        print(player_guess)
    except Exception:  # was a bare except; keep the retry-on-error behaviour
        print('something wrong please try again')
        l, num_all = get_input()
        play_one_round(l, num_all)


def get_input():
    """Prompt for the hand (space-separated card values) and total card count."""
    l = input('list of my cart: ').split()
    num_all = int(input('number of all cart: '))
    l = list(map(int, l))
    return l, num_all


if __name__ == '__main__':
    l, num_all = get_input()
    play_one_round(l, num_all)
is-todos-loaded.guard.ts
import {Injectable} from '@angular/core'; import {ActivatedRouteSnapshot, CanActivate, RouterStateSnapshot} from '@angular/router'; import {select, Store} from '@ngrx/store'; import {AppState} from '@StoreConfig'; import {Observable} from 'rxjs'; import {selectTodosLoaded$} from '@Selectors/todo-list.selector'; import {map} from 'rxjs/internal/operators'; import {TodoListModule} from '@Actions/todo-list.action'; @Injectable() export class IsTodosLoadedGuard implements CanActivate { constructor(private store: Store<AppState>) {
canActivate(next: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<boolean> | Promise<boolean> | boolean { return this.store .pipe( select(selectTodosLoaded$), map(isLoaded => { if (!isLoaded) { this.store.dispatch(new TodoListModule.LoadInitTasks()); } return true; }) ); } }
}
admin.go
package command import ( "fmt" "time" "github.com/spf13/cobra" "github.com/pyroscope-io/pyroscope/pkg/admin" "github.com/pyroscope-io/pyroscope/pkg/cli" "github.com/pyroscope-io/pyroscope/pkg/config" ) // admin func newAdminCmd(cfg *config.Admin) *cobra.Command { vpr := newViper() var cmd *cobra.Command cmd = &cobra.Command{ Use: "admin", Short: "administration commands", RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, _ []string) error { fmt.Println(cfg) printUsageMessage(cmd) return nil }), } // admin cmd.AddCommand(newAdminAppCmd(cfg)) cmd.AddCommand(newAdminUserCmd(cfg)) cmd.AddCommand(newAdminStorageCmd(cfg)) return cmd } // admin app func newAdminAppCmd(cfg *config.Admin) *cobra.Command { vpr := newViper() var cmd *cobra.Command cmd = &cobra.Command{ Use: "app", Short: "", RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, _ []string) error { printUsageMessage(cmd) return nil }), } cmd.AddCommand(newAdminAppGetCmd(&cfg.AdminAppGet)) cmd.AddCommand(newAdminAppDeleteCmd(&cfg.AdminAppDelete)) return cmd } // admin app get func newAdminAppGetCmd(cfg *config.AdminAppGet) *cobra.Command { vpr := newViper() cmd := &cobra.Command{ Use: "get [flags]", Short: "get the list of all apps", Long: "get the list of all apps", RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, _ []string) error { cli, err := admin.NewCLI(cfg.SocketPath, cfg.Timeout) if err != nil { return err } return cli.GetAppsNames() }), } cli.PopulateFlagSet(cfg, cmd.Flags(), vpr) return cmd } // admin app delete func newAdminAppDeleteCmd(cfg *config.AdminAppDelete) *cobra.Command { vpr := newViper() cmd := &cobra.Command{ Use: "delete [flags] [app_name]", Short: "delete an app", Long: "delete an app", Args: cobra.ExactArgs(1), ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { if len(args) != 0 { return nil, cobra.ShellCompDirectiveNoFileComp } cli, err := admin.NewCLI(cfg.SocketPath, time.Second*2) if err != nil { 
return nil, cobra.ShellCompDirectiveNoFileComp } appNames, err := cli.CompleteApp(toComplete) if err != nil { return nil, cobra.ShellCompDirectiveNoFileComp } return appNames, cobra.ShellCompDirectiveNoFileComp }, RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, arg []string) error { cli, err := admin.NewCLI(cfg.SocketPath, cfg.Timeout) if err != nil { return err } return cli.DeleteApp(arg[0], cfg.Force) }), } cli.PopulateFlagSet(cfg, cmd.Flags(), vpr) return cmd } func newAdminUserCmd(cfg *config.Admin) *cobra.Command { vpr := newViper() var cmd *cobra.Command cmd = &cobra.Command{ Use: "user", Short: "manage users", RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, _ []string) error { printUsageMessage(cmd) return nil }), } cmd.AddCommand(newAdminPasswordResetCmd(&cfg.AdminUserPasswordReset)) return cmd } func newAdminPasswordResetCmd(cfg *config.AdminUserPasswordReset) *cobra.Command { vpr := newViper() cmd := &cobra.Command{ Use: "reset-password [flags]", Short: "reset user password", Args: cobra.NoArgs, RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, arg []string) error { ac, err := admin.NewCLI(cfg.SocketPath, cfg.Timeout) if err != nil { return err } if err = ac.ResetUserPassword(cfg.Username, cfg.Password, cfg.Enable); err != nil
fmt.Println("Password for user", cfg.Username, "has been reset successfully.") return nil }), } cli.PopulateFlagSet(cfg, cmd.Flags(), vpr) return cmd } func newAdminStorageCmd(cfg *config.Admin) *cobra.Command { vpr := newViper() var cmd *cobra.Command cmd = &cobra.Command{ Use: "storage", Short: "", RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, _ []string) error { printUsageMessage(cmd) return nil }), } cmd.AddCommand(newAdminStorageCleanupCmd(&cfg.AdminStorageCleanup)) return cmd } func newAdminStorageCleanupCmd(cfg *config.AdminStorageCleanup) *cobra.Command { vpr := newViper() cmd := &cobra.Command{ Use: "cleanup", Short: "remove malformed data", Args: cobra.NoArgs, RunE: cli.CreateCmdRunFn(cfg, vpr, func(_ *cobra.Command, arg []string) error { ac, err := admin.NewCLI(cfg.SocketPath, cfg.Timeout) if err != nil { return err } return ac.CleanupStorage() }), } cli.PopulateFlagSet(cfg, cmd.Flags(), vpr) return cmd }
{ return err }
split_check.rs
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use std::path::Path; use std::sync::mpsc::{self, sync_channel}; use std::sync::Arc; use std::time::Duration; use engine::{rocks, DB}; use engine_rocks::Compat; use raftstore::coprocessor::{ config::{Config, SplitCheckConfigManager}, CoprocessorHost, }; use raftstore::store::{SplitCheckRunner as Runner, SplitCheckTask as Task}; use tikv::config::{ConfigController, Module, TiKvConfig}; use tikv_util::worker::{Scheduler, Worker}; fn tmp_engine<P: AsRef<Path>>(path: P) -> Arc<DB>
fn setup(cfg: TiKvConfig, engine: Arc<DB>) -> (ConfigController, Worker<Task>) { let (router, _) = sync_channel(1); let runner = Runner::new( engine.c().clone(), router.clone(), CoprocessorHost::new(router), cfg.coprocessor.clone(), ); let mut worker: Worker<Task> = Worker::new("split-check-config"); worker.start(runner).unwrap(); let cfg_controller = ConfigController::new(cfg); cfg_controller.register( Module::Coprocessor, Box::new(SplitCheckConfigManager(worker.scheduler())), ); (cfg_controller, worker) } fn validate<F>(scheduler: &Scheduler<Task>, f: F) where F: FnOnce(&Config) + Send + 'static, { let (tx, rx) = mpsc::channel(); scheduler .schedule(Task::Validate(Box::new(move |cfg: &Config| { f(cfg); tx.send(()).unwrap(); }))) .unwrap(); rx.recv_timeout(Duration::from_secs(1)).unwrap(); } #[test] fn test_update_split_check_config() { let (mut cfg, _dir) = TiKvConfig::with_tmp().unwrap(); cfg.validate().unwrap(); let engine = tmp_engine(&cfg.storage.data_dir); let (cfg_controller, mut worker) = setup(cfg.clone(), engine); let scheduler = worker.scheduler(); let cop_config = cfg.coprocessor.clone(); // update of other module's config should not effect split check config cfg_controller .update_config("raftstore.raft-log-gc-threshold", "2000") .unwrap(); validate(&scheduler, move |cfg: &Config| { assert_eq!(cfg, &cop_config); }); let change = { let mut m = std::collections::HashMap::new(); m.insert( "coprocessor.split_region_on_table".to_owned(), "true".to_owned(), ); m.insert("coprocessor.batch_split_limit".to_owned(), "123".to_owned()); m.insert( "coprocessor.region_split_keys".to_owned(), "12345".to_owned(), ); m }; cfg_controller.update(change).unwrap(); // config should be updated let cop_config = { let mut cop_config = cfg.coprocessor; cop_config.split_region_on_table = true; cop_config.batch_split_limit = 123; cop_config.region_split_keys = 12345; cop_config }; validate(&scheduler, move |cfg: &Config| { assert_eq!(cfg, &cop_config); }); 
worker.stop().unwrap().join().unwrap(); }
{ Arc::new( rocks::util::new_engine( path.as_ref().to_str().unwrap(), None, &["split-check-config"], None, ) .unwrap(), ) }
test_node.py
#!/usr/bin/env python3 # Copyright (c) 2017-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Class for bitcoinexodusd node under test""" import contextlib import decimal import errno from enum import Enum import http.client import json import logging import os import re import subprocess import tempfile import time import urllib.parse from .authproxy import JSONRPCException from .util import ( append_config, delete_cookie_file, get_rpc_proxy, rpc_url, wait_until, p2p_port, ) # For Python 3.4 compatibility JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) BITCOINEXODUSD_PROC_WAIT_TIMEOUT = 60 class FailedToStartError(Exception): """Raised when a node fails to start correctly.""" class ErrorMatch(Enum): FULL_TEXT = 1 FULL_REGEX = 2 PARTIAL_REGEX = 3 class TestNode(): """A class for representing a bitcoinexodusd node under test. This class contains: - state about the node (whether it's running, etc) - a Python subprocess.Popen object representing the running process - an RPC connection to the node - one or more P2P connections to the node To make things easier for the test writer, any unrecognised messages will be dispatched to the RPC connection.""" def __init__(self, i, datadir, *, rpchost, timewait, bitcoinexodusd, bitcoinexodus_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False): self.index = i self.datadir = datadir self.stdout_dir = os.path.join(self.datadir, "stdout") self.stderr_dir = os.path.join(self.datadir, "stderr") self.rpchost = rpchost self.rpc_timeout = timewait self.binary = bitcoinexodusd self.coverage_dir = coverage_dir if extra_conf != None: append_config(datadir, extra_conf) # Most callers will just need to add extra args to the standard list below. # For those callers that need more flexibility, they can just set the args property directly. 
# Note that common args are set in the config file (see initialize_datadir) self.extra_args = extra_args self.args = [ self.binary, "-datadir=" + self.datadir, "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i ] self.cli = TestNodeCLI(bitcoinexodus_cli, self.datadir) self.use_cli = use_cli self.running = False self.process = None self.rpc_connected = False self.rpc = None self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) self.cleanup_on_exit = True # Whether to kill the node when this object goes away self.p2ps = [] def get_deterministic_priv_key(self): """Return a deterministic priv key in base58, that only depends on the node's index""" PRIV_KEYS = [ # adress , privkey ('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), ('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), ('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), ('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), ('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), ('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), ('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), ('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), ('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), ] return PRIV_KEYS[self.index] def _node_msg(self, msg: str) -> str: """Return a modified msg that identifies this node by its index as a debugging aid.""" return "[node %d] %s" % (self.index, msg) def _raise_assertion_error(self, msg: str): """Raise an AssertionError with msg modified to identify this node.""" raise 
AssertionError(self._node_msg(msg)) def __del__(self): # Ensure that we don't leave any bitcoinexodusd processes lying around after # the test ends if self.process and self.cleanup_on_exit: # Should only happen on test failure # Avoid using logger, as that may have already been shutdown when # this destructor is called. print(self._node_msg("Cleaning up leftover process")) self.process.kill() def __getattr__(self, name): """Dispatches any unrecognised messages to the RPC connection or a CLI instance.""" if self.use_cli: return getattr(self.cli, name) else: assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection") return getattr(self.rpc, name) def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs): """Start the node.""" if extra_args is None: extra_args = self.extra_args # Add a new stdout and stderr file each time bitcoinexodusd is started if stderr is None: stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) if stdout is None: stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) self.stderr = stderr self.stdout = stdout # Delete any existing cookie file -- if such a file exists (eg due to # unclean shutdown), it will get overwritten anyway by bitcoinexodusd, and # potentially interfere with our attempt to authenticate delete_cookie_file(self.datadir) # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs) self.running = True self.log.debug("bitcoinexodusd started, waiting for RPC to come up") def wait_for_rpc_connection(self): """Sets up an RPC connection to the bitcoinexodusd process. Returns False if unable to connect.""" # Poll at a rate of four times per second
poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): if self.process.poll() is not None: raise FailedToStartError(self._node_msg( 'bitcoinexodusd exited with status {} during initialization'.format(self.process.returncode))) try: self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir) self.rpc.getblockcount() # If the call to getblockcount() succeeds then the RPC connection is up self.rpc_connected = True self.url = self.rpc.url self.log.debug("RPC successfully started") return except IOError as e: if e.errno != errno.ECONNREFUSED: # Port not yet open? raise # unknown IO error except JSONRPCException as e: # Initialization phase if e.error['code'] != -28: # RPC in warmup? raise # unknown JSON RPC exception except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoinexodusd still starting if "No RPC credentials" not in str(e): raise time.sleep(1.0 / poll_per_s) self._raise_assertion_error("Unable to connect to bitcoinexodusd") def get_wallet_rpc(self, wallet_name): if self.use_cli: return self.cli("-rpcwallet={}".format(wallet_name)) else: assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected") wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name)) return self.rpc / wallet_path def stop_node(self, expected_stderr=''): """Stop the node.""" if not self.running: return self.log.debug("Stopping node") try: self.stop() except http.client.CannotSendRequest: self.log.exception("Unable to stop node.") # Check that stderr is as expected self.stderr.seek(0) stderr = self.stderr.read().decode('utf-8').strip() if stderr != expected_stderr: raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr)) self.stdout.close() self.stderr.close() del self.p2ps[:] def is_node_stopped(self): """Checks whether the node has stopped. Returns True if the node has stopped. False otherwise. 
This method is responsible for freeing resources (self.process).""" if not self.running: return True return_code = self.process.poll() if return_code is None: return False # process has stopped. Assert that it didn't return an error code. assert return_code == 0, self._node_msg( "Node returned non-zero exit code (%d) when stopping" % return_code) self.running = False self.process = None self.rpc_connected = False self.rpc = None self.log.debug("Node stopped") return True def wait_until_stopped(self, timeout=BITCOINEXODUSD_PROC_WAIT_TIMEOUT): wait_until(self.is_node_stopped, timeout=timeout) @contextlib.contextmanager def assert_debug_log(self, expected_msgs): debug_log = os.path.join(self.datadir, 'regtest', 'debug.log') with open(debug_log, encoding='utf-8') as dl: dl.seek(0, 2) prev_size = dl.tell() try: yield finally: with open(debug_log, encoding='utf-8') as dl: dl.seek(prev_size) log = dl.read() print_log = " - " + "\n - ".join(log.splitlines()) for expected_msg in expected_msgs: if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None: self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log)) def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs): """Attempt to start the node and expect it to raise an error. extra_args: extra arguments to pass through to bitcoinexodusd expected_msg: regex that stderr should match when bitcoinexodusd fails Will throw if bitcoinexodusd starts without an error. 
Will throw if an expected_msg is provided and it does not match bitcoinexodusd's stdout.""" with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \ tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout: try: self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs) self.wait_for_rpc_connection() self.stop_node() self.wait_until_stopped() except FailedToStartError as e: self.log.debug('bitcoinexodusd failed to start: %s', e) self.running = False self.process = None # Check stderr for expected message if expected_msg is not None: log_stderr.seek(0) stderr = log_stderr.read().decode('utf-8').strip() if match == ErrorMatch.PARTIAL_REGEX: if re.search(expected_msg, stderr, flags=re.MULTILINE) is None: self._raise_assertion_error( 'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_REGEX: if re.fullmatch(expected_msg, stderr) is None: self._raise_assertion_error( 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_TEXT: if expected_msg != stderr: self._raise_assertion_error( 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) else: if expected_msg is None: assert_msg = "bitcoinexodusd should have exited with an error" else: assert_msg = "bitcoinexodusd should have exited with expected error " + expected_msg self._raise_assertion_error(assert_msg) def node_encrypt_wallet(self, passphrase): """"Encrypts the wallet. This causes bitcoinexodusd to shutdown, so this method takes care of cleaning up resources.""" self.encryptwallet(passphrase) self.wait_until_stopped() def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs): """Add a p2p connection to the node. 
This method adds the p2p connection to the self.p2ps list and also returns the connection to the caller.""" if 'dstport' not in kwargs: kwargs['dstport'] = p2p_port(self.index) if 'dstaddr' not in kwargs: kwargs['dstaddr'] = '127.0.0.1' p2p_conn.peer_connect(**kwargs)() self.p2ps.append(p2p_conn) if wait_for_verack: p2p_conn.wait_for_verack() return p2p_conn @property def p2p(self): """Return the first p2p connection Convenience property - most tests only use a single p2p connection to each node, so this saves having to write node.p2ps[0] many times.""" assert self.p2ps, self._node_msg("No p2p connection") return self.p2ps[0] def disconnect_p2ps(self): """Close all p2p connections to the node.""" for p in self.p2ps: p.peer_disconnect() del self.p2ps[:] class TestNodeCLIAttr: def __init__(self, cli, command): self.cli = cli self.command = command def __call__(self, *args, **kwargs): return self.cli.send_cli(self.command, *args, **kwargs) def get_request(self, *args, **kwargs): return lambda: self(*args, **kwargs) class TestNodeCLI(): """Interface to bitcoinexodus-cli for an individual node""" def __init__(self, binary, datadir): self.options = [] self.binary = binary self.datadir = datadir self.input = None self.log = logging.getLogger('TestFramework.bitcoinexoduscli') def __call__(self, *options, input=None): # TestNodeCLI is callable with bitcoinexodus-cli command-line options cli = TestNodeCLI(self.binary, self.datadir) cli.options = [str(o) for o in options] cli.input = input return cli def __getattr__(self, command): return TestNodeCLIAttr(self, command) def batch(self, requests): results = [] for request in requests: try: results.append(dict(result=request())) except JSONRPCException as e: results.append(dict(error=e)) return results def send_cli(self, command=None, *args, **kwargs): """Run bitcoinexodus-cli command. 
Deserializes returned string as python object.""" pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args] named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()] assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoinexodus-cli call" p_args = [self.binary, "-datadir=" + self.datadir] + self.options if named_args: p_args += ["-named"] if command is not None: p_args += [command] p_args += pos_args + named_args self.log.debug("Running bitcoinexodus-cli command: %s" % command) process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) cli_stdout, cli_stderr = process.communicate(input=self.input) returncode = process.poll() if returncode: match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr) if match: code, message = match.groups() raise JSONRPCException(dict(code=int(code), message=message)) # Ignore cli_stdout, raise with cli_stderr raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr) try: return json.loads(cli_stdout, parse_float=decimal.Decimal) except JSONDecodeError: return cli_stdout.rstrip("\n")
RBT.js
import getContract from "@/utils/abiUtil"; function judgeToken(rootState) { if (!state.token) state.token = getContract.getContractByName('RBT', rootState.app.web3) } const state = {}; const mutations = {}; const actions = { addFreeUser ({rootState},{ userAddress }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.addFreeUser(userAddress).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.addFreeUser(userAddress).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, admin ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.admin().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, allowance ({rootState},{ owner,spender }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.allowance(owner,spender).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, approve ({rootState},{ spender,amount }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.approve(spender,amount).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.approve(spender,amount).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, balanceOf ({rootState},{ account }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.balanceOf(account).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, burn ({rootState},{ account,amount }){ judgeToken(rootState) return new Promise((resolve,reject) 
=> { state.token.methods.burn(account,amount).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.burn(account,amount).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, changeFeeTo ({rootState},{ guager }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.changeFeeTo(guager).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.changeFeeTo(guager).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, changeOwner ({rootState},{ manager }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.changeOwner(manager).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.changeOwner(manager).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, checkpoints ({rootState},{ param0,param1 }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.checkpoints(param0,param1).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, decimals ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.decimals().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, decreaseAllowance ({rootState},{ spender,subtractedValue }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.decreaseAllowance(spender,subtractedValue).estimateGas({ from: rootState.app.account, }).then(gas => { 
state.token.methods.decreaseAllowance(spender,subtractedValue).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => {
resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, fee ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.fee().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, feeto ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.feeto().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, freeUsers ({rootState},{ param0 }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.freeUsers(param0).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, getCurrentVotes ({rootState},{ account }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.getCurrentVotes(account).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, getPriorVotes ({rootState},{ account,blockNumber }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.getPriorVotes(account,blockNumber).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, increaseAllowance ({rootState},{ spender,addedValue }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.increaseAllowance(spender,addedValue).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.increaseAllowance(spender,addedValue).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, modifyFee ({rootState},{ value }){ judgeToken(rootState) return new 
Promise((resolve,reject) => { state.token.methods.modifyFee(value).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.modifyFee(value).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, name ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.name().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, numCheckpoints ({rootState},{ param0 }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.numCheckpoints(param0).call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, removeFreeUser ({rootState},{ userAddress }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.removeFreeUser(userAddress).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.removeFreeUser(userAddress).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, symbol ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.symbol().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, totalSupply ({rootState} ){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.totalSupply().call().then(res => { resolve(res) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, transfer ({rootState},{ recipient,amount }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.transfer(recipient,amount).estimateGas({ from: 
rootState.app.account, }).then(gas => { state.token.methods.transfer(recipient,amount).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, transferFrom ({rootState},{ sender,recipient,amount }){ judgeToken(rootState) return new Promise((resolve,reject) => { state.token.methods.transferFrom(sender,recipient,amount).estimateGas({ from: rootState.app.account, }).then(gas => { state.token.methods.transferFrom(sender,recipient,amount).send({ from: rootState.app.account, gas: parseInt(gas * 1.2) }).then(res => { resolve(res) }) }).catch(err=>{ reject(JSON.parse(err.message.substr(24,err.message.length)).message) }) }) }, } export default { namespaced: true, mutations, state, actions }
report.worker.js
const BaseWorker = require('./base.worker'); const RecurrenceRules = require('./recurrenceRule'); const cron = require('node-cron'); class ReportWorker extends BaseWorker { constructor() { super(); this.recurrenceRules = new RecurrenceRules(); } start = () => { // this.recurrenceRules.second = "*"; this.recurrenceRules.minute = "*"; this.recurrenceRules.hour = "*"; this.recurrenceRules.dayOfMonth = "*"; this.recurrenceRules.month = "*"; this.recurrenceRules.dayOfWeek = "*"; const rules = this.recurrenceRules.getRules();
console.log("Run worker 1"); }); const worker2 = cron.schedule(rules, () => { console.log("Run worker 2"); }); } } module.exports = ReportWorker;
const worker1 = cron.schedule(rules, () => {
0004_remove_photo_path.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2016-06-16 01:39 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration):
dependencies = [ ('instaapp', '0003_photo_image'), ] operations = [ migrations.RemoveField( model_name='photo', name='path', ), ]
lib.rs
// Source adopted from // https://github.com/tildeio/helix-website/blob/master/crates/word_count/src/lib.rs #![feature(specialization)] #[macro_use] extern crate pyo3; extern crate rayon; use pyo3::prelude::*; use rayon::prelude::*; use std::fs; use std::path::PathBuf; /// Represents a file that can be searched #[pyclass] struct WordCounter { path: PathBuf, } #[pymethods] impl WordCounter { #[new] fn __new__(obj: &PyRawObject, path: String) -> PyResult<()> { obj.init(|| WordCounter { path: PathBuf::from(path), }) } /// Searches for the word, parallelized by rayon fn search(&self, py: Python, search: String) -> PyResult<usize> { let contents = fs::read_to_string(&self.path)?; let count = py.allow_threads(move || { contents .par_lines() .map(|line| count_line(line, &search)) .sum() }); Ok(count) } /// Searches for a word in a classic sequential fashion fn search_sequential(&self, needle: String) -> PyResult<usize> { let contents = fs::read_to_string(&self.path)?; let result = contents.lines().map(|line| count_line(line, &needle)).sum(); Ok(result) } } fn
(word: &str, needle: &str) -> bool { let mut needle = needle.chars(); for ch in word.chars().skip_while(|ch| !ch.is_alphabetic()) { match needle.next() { None => { return !ch.is_alphabetic(); } Some(expect) => { if ch.to_lowercase().next() != Some(expect) { return false; } } } } return needle.next().is_none(); } /// Count the occurences of needle in line, case insensitive #[pyfunction] fn count_line(line: &str, needle: &str) -> usize { let mut total = 0; for word in line.split(' ') { if matches(word, needle) { total += 1; } } total } #[pymodule] fn word_count(_py: Python, m: &PyModule) -> PyResult<()> { m.add_wrapped(wrap_function!(count_line))?; m.add_class::<WordCounter>()?; Ok(()) }
matches
cram.rs
//! All the different types of RAM that cartridges can contain. Some types of RAM //! allow banking, similar to ROM banking. This banking is always triggered by //! writing special values to certain sections of ROM, and requires an MBC that //! supports it. //! //! Some RAM types support batteries; In that case, they should make their internal //! state public via the [`Savegame`] trait if a battery is present. use super::desc::RamSize; use crate::{address::CRamAddr, Savegame}; use std::pin::Pin; /// The interface between the RAM implementation and the MBC. The CPU will never /// directly interact with this trait since the MBC can decide to disable RAM /// temporarily; Thus, all communication goes through the MBC implementation. pub trait CartridgeRam: Savegame { fn read(&self, addr: CRamAddr) -> u8; fn write(&mut self, addr: CRamAddr, val: u8); fn try_select_bank(&mut self, bank: u8); } /// Cartridges with no internal RAM should use this implementation, where every /// write is a NOOP and every read yields 0xFF. pub struct NoCRam; impl Savegame for NoCRam {} impl CartridgeRam for NoCRam { fn read(&self, _addr: CRamAddr) -> u8 { 0xff } fn write(&mut self, _addr: CRamAddr, _val: u8) {} fn try_select_bank(&mut self, _bank: u8) {} } /// A fixed amount of RAM without banking support. 
Attempts to switch the RAM bank /// compiles to a NOOP pub struct CRamUnbanked { cram: Box<[u8]>, has_battery: bool, } impl CRamUnbanked { pub fn new(ram_size: RamSize, has_battery: bool) -> Self { let cram = match ram_size { RamSize::RamNone => panic!("Invalid ram size for CRAMUnbanked"), RamSize::Ram2Kb => vec![0; 0x800].into_boxed_slice(), RamSize::Ram8Kb => vec![0; 0x2000].into_boxed_slice(), RamSize::Ram32Kb => panic!("Invalid ram size for CRAMUnbanked"), }; Self { cram, has_battery } } } impl Savegame for CRamUnbanked { fn savegame(&self) -> Option<&[u8]> { // TODO: Nicer API blocked by bool_to_option, look at other implementers too if self.has_battery { Some(&self.cram) } else { None } } fn savegame_mut(&mut self) -> Option<&mut [u8]> { if self.has_battery { Some(&mut self.cram) } else { None } } } impl CartridgeRam for CRamUnbanked { fn read(&self, addr: CRamAddr) -> u8 { *self.cram.get(addr.raw() as usize).unwrap_or(&0xff) } fn write(&mut self, addr: CRamAddr, val: u8) { if let Some(mem) = self.cram.get_mut(addr.raw() as usize) { *mem = val; } } fn try_select_bank(&mut self, _bank: u8) {} } /// MBC2 has a weird half-byte RAM, where only the lower 4 bits of each addressable byte are used. /// We store this in a compressed format so we use all 8 bits of each byte. The lower half of the /// byte contains the lower address. pub struct CRamMBC2 { // TODO: Internally, this looks very much like CRAMUnbanked. The Savegame impl is also the same. 
See if it should be modularized cram: Box<[u8]>, has_battery: bool, } impl CRamMBC2 { pub fn new(has_battery: bool) -> Self { Self { cram: vec![0u8; 256].into_boxed_slice(), has_battery, } } } impl Savegame for CRamMBC2 { fn savegame(&self) -> Option<&[u8]> { if self.has_battery { Some(&self.cram) } else { None } } fn savegame_mut(&mut self) -> Option<&mut [u8]> { if self.has_battery { Some(&mut self.cram) } else { None } } } impl CartridgeRam for CRamMBC2 { fn read(&self, addr: CRamAddr) -> u8 { let shift = (addr.raw() & 1) * 4; let sub_addr = (addr.raw() >> 1) as usize; self.cram .get(sub_addr) .map(|val| (val >> shift) & 0xF) .unwrap_or(0xF) // TODO: Check if illegal reads here return 0xF or 0xFF (or something wild) } fn write(&mut self, addr: CRamAddr, val: u8) { let sub_addr = (addr.raw() >> 1) as usize; if let Some(mem) = self.cram.get_mut(sub_addr) { let shift = (addr.raw() & 1) * 4; // Clear the old content *mem &= 0xF0u8.rotate_left(shift as u32); // Write the new value *mem |= (val & 0xF) << shift; } } fn try_select_bank(&mut self, _bank: u8) {} } /// A large amount of RAM with banking support. Selection of the current RAM bank is done by the MBC. /// Attempting to switch to a non-existent bank leaves the currently mapped bank unchanged. pub struct CRamBanked { cram: Pin<Box<[u8]>>, mapped_bank: &'static mut [u8], has_battery: bool, } impl CRamBanked { pub fn new(has_battery: bool) -> Self { let mut cram = Pin::new(vec![0u8; 4 * 0x2000].into_boxed_slice()); // We forget about the lifetime of the reference here, which is safe because we got the memory // inside a `Pin<Box<...>>` right here in the struct. 
let mapped_bank = unsafe { std::mem::transmute(&mut cram[..]) }; Self { cram, mapped_bank, has_battery, } } } impl Savegame for CRamBanked { fn savegame(&self) -> Option<&[u8]> { if self.has_battery { Some(&self.cram) } else { None } } fn savegame_mut(&mut self) -> Option<&mut [u8]> { if self.has_battery { Some(&mut self.cram) } else { None } } } impl CartridgeRam for CRamBanked { fn read(&self, addr: CRamAddr) -> u8 { self.mapped_bank[addr.raw() as usize] } fn write(&mut self, addr: CRamAddr, val: u8) { self.mapped_bank[addr.raw() as usize] = val; } fn try_select_bank(&mut self, bank: u8) { if bank < 4 { // This transmute forgets the lifetime of the reference; This is safe because // self actually owns the memory and has it inside a pin, so this reference // will never become invalid self.mapped_bank =
}
unsafe { std::mem::transmute(&mut self.cram[0x2000 * bank as usize..]) }; } }
wsgi.py
""" WSGI config for src project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one
middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
that later delegates to the Django one. For example, you could introduce WSGI
modal.service.js
/** * @fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ import { isPlatformBrowser } from '@angular/common'; import { Inject, Injectable, PLATFORM_ID } from '@angular/core'; import { BehaviorSubject } from 'rxjs'; import { map } from 'rxjs/operators'; import { Modal, ModalCloseEvent, ModalCompleteEvent } from './modal'; import * as i0 from "@angular/core"; export class ModalService { /** * @param {?} platformId */ constructor(platformId) { this.platformId = platformId; this.modals$ = new BehaviorSubject([]); } /** * @return {?} */ getInfos() { return this.modals$.pipe(map((modals) => { return modals.length ? modals[modals.length - 1] : null; })); } /** * @param {?} name * @return {?} */ addClass(name) { if (isPlatformBrowser(this.platformId)) { /** @type {?} */ const body = (/** @type {?} */ (document.querySelector('body'))); body.classList.add(name); } } /** * @param {?} name * @return {?} */ removeClass(name) { if (isPlatformBrowser(this.platformId)) { /** @type {?} */ const body = (/** @type {?} */ (document.querySelector('body'))); body.classList.remove(name); } } /** * @param {?} modal * @return {?} */ open(modal) { this.addClass('modal-active'); modal = new Modal(modal); /** @type {?} */ const modals = this.modals$.getValue(); modals.push(modal); this.modals$.next(modals); return modal.emitter; // event emitter bound to modals$ } /** * @param {?=} modal
* @param {?=} data * @return {?} */ complete(modal, data) { modal = modal ? this.remove(modal) : this.pop(); if (modal) { modal.emitter.emit(new ModalCompleteEvent({ modal: modal, data: data })); } } /** * @param {?=} modal * @param {?=} data * @return {?} */ close(modal, data) { modal = this.removeAll(); if (modal) { modal.emitter.emit(new ModalCloseEvent({ modal: modal, data: data })); } } /** * @param {?=} modal * @param {?=} data * @return {?} */ prev(modal, data) { modal = modal ? this.remove(modal) : this.pop(); if (modal) { modal.emitter.emit(new ModalCloseEvent({ modal: modal, data: data })); } } /** * @private * @return {?} */ pop() { /** @type {?} */ const modals = this.modals$.getValue(); if (modals.length) { /** @type {?} */ const modal = modals.pop(); if (!modals.length) { this.removeClass('modal-active'); } this.modals$.next(modals); return modal; } else { return null; } } /** * @private * @param {?} modal * @return {?} */ remove(modal) { /** @type {?} */ const modals = this.modals$.getValue(); if (modals.length && modals[modals.length - 1] === modal) { modals.pop(); if (!modals.length) { this.removeClass('modal-active'); } this.modals$.next(modals); return modal; } else { return null; } } /** * @private * @return {?} */ removeAll() { /** @type {?} */ const modals = this.modals$.getValue(); if (modals.length) { /** @type {?} */ const modal = modals.pop(); this.removeClass('modal-active'); this.modals$.next([]); return modal; } else { return null; } } } ModalService.decorators = [ { type: Injectable, args: [{ providedIn: 'root' },] } ]; /** @nocollapse */ ModalService.ctorParameters = () => [ { type: String, decorators: [{ type: Inject, args: [PLATFORM_ID,] }] } ]; /** @nocollapse */ ModalService.ngInjectableDef = i0.defineInjectable({ factory: function ModalService_Factory() { return new ModalService(i0.inject(i0.PLATFORM_ID)); }, token: ModalService, providedIn: "root" }); if (false) { /** @type {?} */ ModalService.prototype.modals$; /** * @type {?} 
* @private */ ModalService.prototype.platformId; } //# sourceMappingURL=data:application/json;base64,{"version":3,"file":"modal.service.js","sourceRoot":"ng://@artisan/core/","sources":["lib/ui/modal/modal.service.ts"],"names":[],"mappings":";;;;AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAgB,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAc,MAAM,MAAM,CAAC;AACnD,OAAO,EAAE,GAAG,EAAE,MAAM,gBAAgB,CAAC;AACrC,OAAO,EAAE,KAAK,EAAE,eAAe,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;;AAKrE,MAAM,OAAO,YAAY;;;;IAIxB,YAC8B,UAAkB;QAAlB,eAAU,GAAV,UAAU,CAAQ;QAHhD,YAAO,GAAG,IAAI,eAAe,CAAU,EAAE,CAAC,CAAC;IAIvC,CAAC;;;;IAEL,QAAQ;QACP,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CACvB,GAAG,CAAC,CAAC,MAAe,EAAE,EAAE;YACvB,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACzD,CAAC,CAAC,CACF,CAAC;IACH,CAAC;;;;;IAED,QAAQ,CAAC,IAAY;QACpB,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;;kBACjC,IAAI,GAAG,mBAAA,QAAQ,CAAC,aAAa,CAAC,MAAM,CAAC,EAAe;YAC1D,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;SACzB;IACF,CAAC;;;;;IAED,WAAW,CAAC,IAAY;QACvB,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;;kBACjC,IAAI,GAAG,mBAAA,QAAQ,CAAC,aAAa,CAAC,MAAM,CAAC,EAAe;YAC1D,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;SAC5B;IACF,CAAC;;;;;IAED,IAAI,CAAC,KAAY;QAChB,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC;QAC9B,KAAK,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;;cACnB,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE;QACtC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC1B,OAAO,KAAK,CAAC,OAAO,CAAC;QACrB,iCAAiC;IAClC,CAAC;;;;;;IAED,QAAQ,CAAC,KAAa,EAAE,IAAU;QACjC,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;QAChD,IAAI,KAAK,EAAE;YACV,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,kBAAkB,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;SACzE;IACF,CAAC;;;;;;IAED,KAAK,CAAC,KAAa,EAAE,IAAU;QAC9B,KAAK,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;QACzB,IAAI,KAAK,EAAE;YACV,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,eAAe,CAAC,EAAE,KAAK,E
AAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;SACtE;IACF,CAAC;;;;;;IAED,IAAI,CAAC,KAAa,EAAE,IAAU;QAC7B,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;QAChD,IAAI,KAAK,EAAE;YACV,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,eAAe,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;SACtE;IACF,CAAC;;;;;IAEO,GAAG;;cACJ,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE;QACtC,IAAI,MAAM,CAAC,MAAM,EAAE;;kBACZ,KAAK,GAAG,MAAM,CAAC,GAAG,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;gBACnB,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;aACjC;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC1B,OAAO,KAAK,CAAC;SACb;aAAM;YACN,OAAO,IAAI,CAAC;SACZ;IACF,CAAC;;;;;;IAEO,MAAM,CAAC,KAAY;;cACpB,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE;QACtC,IAAI,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,KAAK,EAAE;YACzD,MAAM,CAAC,GAAG,EAAE,CAAC;YACb,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;gBACnB,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;aACjC;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC1B,OAAO,KAAK,CAAC;SACb;aAAM;YACN,OAAO,IAAI,CAAC;SACZ;IACF,CAAC;;;;;IAEO,SAAS;;cACV,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE;QACtC,IAAI,MAAM,CAAC,MAAM,EAAE;;kBACZ,KAAK,GAAG,MAAM,CAAC,GAAG,EAAE;YAC1B,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;YACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACtB,OAAO,KAAK,CAAC;SACb;aAAM;YACN,OAAO,IAAI,CAAC;SACZ;IACF,CAAC;;;YAtGD,UAAU,SAAC;gBACX,UAAU,EAAE,MAAM;aAClB;;;;yCAME,MAAM,SAAC,WAAW;;;;;IAHpB,+BAA2C;;;;;IAG1C,kCAA+C","sourcesContent":["import { isPlatformBrowser } from '@angular/common';\nimport { EventEmitter, Inject, Injectable, PLATFORM_ID } from '@angular/core';\nimport { BehaviorSubject, Observable } from 'rxjs';\nimport { map } from 'rxjs/operators';\nimport { Modal, ModalCloseEvent, ModalCompleteEvent } from './modal';\n\n@Injectable({\n\tprovidedIn: 'root'\n})\nexport class ModalService {\n\n\tmodals$ = new BehaviorSubject<Modal[]>([]);\n\n\tconstructor(\n\t\t@Inject(PLATFORM_ID) private platformId: string\n\t) { }\n\n\tgetInfos(): 
Observable<Modal> {\n\t\treturn this.modals$.pipe(\n\t\t\tmap((modals: Modal[]) => {\n\t\t\t\treturn modals.length ? modals[modals.length - 1] : null;\n\t\t\t})\n\t\t);\n\t}\n\n\taddClass(name: string): void {\n\t\tif (isPlatformBrowser(this.platformId)) {\n\t\t\tconst body = document.querySelector('body') as HTMLElement;\n\t\t\tbody.classList.add(name);\n\t\t}\n\t}\n\n\tremoveClass(name: string): void {\n\t\tif (isPlatformBrowser(this.platformId)) {\n\t\t\tconst body = document.querySelector('body') as HTMLElement;\n\t\t\tbody.classList.remove(name);\n\t\t}\n\t}\n\n\topen(modal: Modal): EventEmitter<ModalCompleteEvent | ModalCloseEvent> {\n\t\tthis.addClass('modal-active');\n\t\tmodal = new Modal(modal);\n\t\tconst modals = this.modals$.getValue();\n\t\tmodals.push(modal);\n\t\tthis.modals$.next(modals);\n\t\treturn modal.emitter;\n\t\t// event emitter bound to modals$\n\t}\n\n\tcomplete(modal?: Modal, data?: any): void {\n\t\tmodal = modal ? this.remove(modal) : this.pop();\n\t\tif (modal) {\n\t\t\tmodal.emitter.emit(new ModalCompleteEvent({ modal: modal, data: data }));\n\t\t}\n\t}\n\n\tclose(modal?: Modal, data?: any): void {\n\t\tmodal = this.removeAll();\n\t\tif (modal) {\n\t\t\tmodal.emitter.emit(new ModalCloseEvent({ modal: modal, data: data }));\n\t\t}\n\t}\n\n\tprev(modal?: Modal, data?: any): void {\n\t\tmodal = modal ? 
this.remove(modal) : this.pop();\n\t\tif (modal) {\n\t\t\tmodal.emitter.emit(new ModalCloseEvent({ modal: modal, data: data }));\n\t\t}\n\t}\n\n\tprivate pop(): Modal {\n\t\tconst modals = this.modals$.getValue();\n\t\tif (modals.length) {\n\t\t\tconst modal = modals.pop();\n\t\t\tif (!modals.length) {\n\t\t\t\tthis.removeClass('modal-active');\n\t\t\t}\n\t\t\tthis.modals$.next(modals);\n\t\t\treturn modal;\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tprivate remove(modal: Modal): Modal {\n\t\tconst modals = this.modals$.getValue();\n\t\tif (modals.length && modals[modals.length - 1] === modal) {\n\t\t\tmodals.pop();\n\t\t\tif (!modals.length) {\n\t\t\t\tthis.removeClass('modal-active');\n\t\t\t}\n\t\t\tthis.modals$.next(modals);\n\t\t\treturn modal;\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tprivate removeAll(): Modal {\n\t\tconst modals = this.modals$.getValue();\n\t\tif (modals.length) {\n\t\t\tconst modal = modals.pop();\n\t\t\tthis.removeClass('modal-active');\n\t\t\tthis.modals$.next([]);\n\t\t\treturn modal;\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t/*\n\tinit(component: any, providers: object, outputs: object) {\n\t\tconst config = { inputs: providers, outputs };\n\t\tthis.domService.appendComponentTo(this.modalElementId, component, config);\n\t\tdocument.getElementById(this.modalElementId).className = 'show';\n\t\tdocument.getElementById(this.overlayElementId).className = 'show';\n\t}\n\n\tdestroy() {\n\t\tthis.domService.removeComponent();\n\t\tdocument.getElementById(this.modalElementId).className = 'hidden';\n\t\tdocument.getElementById(this.overlayElementId).className = 'hidden';\n\t}\n\t*/\n\n}\n"]}
demo_k_ejt1.py
#!/usr/bin/env python3 """ Demo for exponentiated Jensen-Tsallis kernel-1 estimators. Analytical vs estimated value is illustrated for spherical normal random variables. """ from numpy import eye from numpy.random import rand, multivariate_normal, randn from scipy import arange, zeros, ones import matplotlib.pyplot as plt from ite.cost.x_factory import co_factory from ite.cost.x_analytical_values import analytical_value_k_ejt1 def main(): # parameters:
if __name__ == "__main__": main()
dim = 1 # dimension of the distribution num_of_samples_v = arange(1000, 50*1000+1, 2000) u = 0.8 # >0, parameter of the Jensen-Tsallis kernel cost_name = 'MKExpJT1_HT' # dim >= 1 # initialization: alpha = 2 # fixed; parameter of the Jensen-Tsallis kernel; for alpha = 2 we have # explicit formula for the Tsallis entropy, and hence for the # Jensen-Tsallis kernel(-1). distr = 'normal' # fixed num_of_samples_max = num_of_samples_v[-1] length = len(num_of_samples_v) co = co_factory(cost_name, mult=True, alpha=alpha, u=u) # cost object k_hat_v = zeros(length) # vector of estimated kernel values # distr, dim -> samples (y1,y2), distribution parameters (par1,par2), # analytical value (k): if distr == 'normal': # generate samples (y1,y2); y1~N(m1,s1^2xI), y2~N(m2,s2^2xI): m1, s1 = randn(dim), rand(1) m2, s2 = randn(dim), rand(1) y1 = multivariate_normal(m1, s1**2 * eye(dim), num_of_samples_max) y2 = multivariate_normal(m2, s2**2 * eye(dim), num_of_samples_max) par1 = {"mean": m1, "std": s1} par2 = {"mean": m2, "std": s2} else: raise Exception('Distribution=?') k = analytical_value_k_ejt1(distr, distr, u, par1, par2) # estimation: for (tk, num_of_samples) in enumerate(num_of_samples_v): k_hat_v[tk] = co.estimation(y1[0:num_of_samples], y2[0:num_of_samples]) # broadcast print("tk={0}/{1}".format(tk+1, length)) # plot: plt.plot(num_of_samples_v, k_hat_v, num_of_samples_v, ones(length)*k) plt.xlabel('Number of samples') plt.ylabel('Exponentiated Jensen-Tsallis kernel-1') plt.legend(('estimation', 'analytical value'), loc='best') plt.title("Estimator: " + cost_name) plt.show()
RelayModernStore.js
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * * @format */ // flowlint ambiguous-object-type:error 'use strict'; function
(o) { if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (o = _unsupportedIterableToArray(o))) { var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var it, normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; } function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(n); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } var DataChecker = require('./DataChecker'); var RelayModernRecord = require('./RelayModernRecord'); var RelayOptimisticRecordSource = require('./RelayOptimisticRecordSource'); var RelayProfiler = require('../util/RelayProfiler'); var RelayReader = require('./RelayReader'); var RelayReferenceMarker = require('./RelayReferenceMarker'); var RelayStoreUtils = require('./RelayStoreUtils'); var deepFreeze = require('../util/deepFreeze'); var defaultGetDataID = require('./defaultGetDataID'); var hasOverlappingIDs = 
require('./hasOverlappingIDs'); var invariant = require("fbjs/lib/invariant"); var recycleNodesInto = require('../util/recycleNodesInto'); var resolveImmediate = require('../util/resolveImmediate'); var _require = require('./RelayStoreUtils'), ROOT_ID = _require.ROOT_ID, ROOT_TYPE = _require.ROOT_TYPE; var DEFAULT_RELEASE_BUFFER_SIZE = 0; /** * @public * * An implementation of the `Store` interface defined in `RelayStoreTypes`. * * Note that a Store takes ownership of all records provided to it: other * objects may continue to hold a reference to such records but may not mutate * them. The static Relay core is architected to avoid mutating records that may have been * passed to a store: operations that mutate records will either create fresh * records or clone existing records and modify the clones. Record immutability * is also enforced in development mode by freezing all records passed to a store. */ var RelayModernStore = /*#__PURE__*/function () { function RelayModernStore(source, options) { var _options$gcReleaseBuf, _options$gcScheduler, _options$UNSTABLE_DO_, _options$operationLoa; // Prevent mutation of a record from outside the store. if (process.env.NODE_ENV !== "production") { var storeIDs = source.getRecordIDs(); for (var ii = 0; ii < storeIDs.length; ii++) { var record = source.get(storeIDs[ii]); if (record) { RelayModernRecord.freeze(record); } } } this._currentWriteEpoch = 0; this._gcHoldCounter = 0; this._gcReleaseBufferSize = (_options$gcReleaseBuf = options === null || options === void 0 ? void 0 : options.gcReleaseBufferSize) !== null && _options$gcReleaseBuf !== void 0 ? _options$gcReleaseBuf : DEFAULT_RELEASE_BUFFER_SIZE; this._gcScheduler = (_options$gcScheduler = options === null || options === void 0 ? void 0 : options.gcScheduler) !== null && _options$gcScheduler !== void 0 ? _options$gcScheduler : resolveImmediate; this._getDataID = (_options$UNSTABLE_DO_ = options === null || options === void 0 ? 
void 0 : options.UNSTABLE_DO_NOT_USE_getDataID) !== null && _options$UNSTABLE_DO_ !== void 0 ? _options$UNSTABLE_DO_ : defaultGetDataID; this._globalInvalidationEpoch = null; this._hasScheduledGC = false; this._index = 0; this._invalidationSubscriptions = new Set(); this._invalidatedRecordIDs = new Set(); this._queryCacheExpirationTime = options === null || options === void 0 ? void 0 : options.queryCacheExpirationTime; this._operationLoader = (_options$operationLoa = options === null || options === void 0 ? void 0 : options.operationLoader) !== null && _options$operationLoa !== void 0 ? _options$operationLoa : null; this._optimisticSource = null; this._recordSource = source; this._releaseBuffer = []; this._roots = new Map(); this._shouldScheduleGC = false; this._subscriptions = new Set(); this._updatedRecordIDs = {}; initializeRecordSource(this._recordSource); } var _proto = RelayModernStore.prototype; _proto.getSource = function getSource() { var _this$_optimisticSour; return (_this$_optimisticSour = this._optimisticSource) !== null && _this$_optimisticSour !== void 0 ? _this$_optimisticSour : this._recordSource; }; _proto.check = function check(operation, options) { var _this$_optimisticSour2, _options$target, _options$handlers; var selector = operation.root; var source = (_this$_optimisticSour2 = this._optimisticSource) !== null && _this$_optimisticSour2 !== void 0 ? _this$_optimisticSour2 : this._recordSource; var globalInvalidationEpoch = this._globalInvalidationEpoch; var rootEntry = this._roots.get(operation.request.identifier); var operationLastWrittenAt = rootEntry != null ? rootEntry.epoch : null; // Check if store has been globally invalidated if (globalInvalidationEpoch != null) { // If so, check if the operation we're checking was last written // before or after invalidation occured. 
if (operationLastWrittenAt == null || operationLastWrittenAt <= globalInvalidationEpoch) { // If the operation was written /before/ global invalidation occurred, // or if this operation has never been written to the store before, // we will consider the data for this operation to be stale // (i.e. not resolvable from the store). return { status: 'stale' }; } } var target = (_options$target = options === null || options === void 0 ? void 0 : options.target) !== null && _options$target !== void 0 ? _options$target : source; var handlers = (_options$handlers = options === null || options === void 0 ? void 0 : options.handlers) !== null && _options$handlers !== void 0 ? _options$handlers : []; var operationAvailability = DataChecker.check(source, target, selector, handlers, this._operationLoader, this._getDataID); return getAvailabilityStatus(operationAvailability, operationLastWrittenAt, rootEntry === null || rootEntry === void 0 ? void 0 : rootEntry.fetchTime, this._queryCacheExpirationTime); }; _proto.retain = function retain(operation) { var _this = this; var id = operation.request.identifier; var disposed = false; var dispose = function dispose() { // Ensure each retain can only dispose once if (disposed) { return; } disposed = true; // For Flow: guard against the entry somehow not existing var rootEntry = _this._roots.get(id); if (rootEntry == null) { return; } // Decrement the ref count: if it becomes zero it is eligible // for release. rootEntry.refCount--; if (rootEntry.refCount === 0) { var _queryCacheExpirationTime = _this._queryCacheExpirationTime; var rootEntryIsStale = rootEntry.fetchTime != null && _queryCacheExpirationTime != null && rootEntry.fetchTime <= Date.now() - _queryCacheExpirationTime; if (rootEntryIsStale) { _this._roots["delete"](id); _this._scheduleGC(); } else { _this._releaseBuffer.push(id); // If the release buffer is now over-full, remove the least-recently // added entry and schedule a GC. 
Note that all items in the release // buffer have a refCount of 0. if (_this._releaseBuffer.length > _this._gcReleaseBufferSize) { var _id = _this._releaseBuffer.shift(); _this._roots["delete"](_id); _this._scheduleGC(); } } } }; var rootEntry = this._roots.get(id); if (rootEntry != null) { if (rootEntry.refCount === 0) { // This entry should be in the release buffer, but it no longer belongs // there since it's retained. Remove it to maintain the invariant that // all release buffer entries have a refCount of 0. this._releaseBuffer = this._releaseBuffer.filter(function (_id) { return _id !== id; }); } // If we've previously retained this operation, increment the refCount rootEntry.refCount += 1; } else { // Otherwise create a new entry for the operation this._roots.set(id, { operation: operation, refCount: 1, epoch: null, fetchTime: null }); } return { dispose: dispose }; }; _proto.lookup = function lookup(selector) { var source = this.getSource(); var snapshot = RelayReader.read(source, selector); if (process.env.NODE_ENV !== "production") { deepFreeze(snapshot); } return snapshot; } // This method will return a list of updated owners from the subscriptions ; _proto.notify = function notify(sourceOperation, invalidateStore) { var _this2 = this; // Increment the current write when notifying after executing // a set of changes to the store. 
this._currentWriteEpoch++; if (invalidateStore === true) { this._globalInvalidationEpoch = this._currentWriteEpoch; } var source = this.getSource(); var updatedOwners = []; this._subscriptions.forEach(function (subscription) { var owner = _this2._updateSubscription(source, subscription); if (owner != null) { updatedOwners.push(owner); } }); this._invalidationSubscriptions.forEach(function (subscription) { _this2._updateInvalidationSubscription(subscription, invalidateStore === true); }); this._updatedRecordIDs = {}; this._invalidatedRecordIDs.clear(); // If a source operation was provided (indicating the operation // that produced this update to the store), record the current epoch // at which this operation was written. if (sourceOperation != null) { // We only track the epoch at which the operation was written if // it was previously retained, to keep the size of our operation // epoch map bounded. If a query wasn't retained, we assume it can // may be deleted at any moment and thus is not relevant for us to track // for the purposes of invalidation. var id = sourceOperation.request.identifier; var rootEntry = this._roots.get(id); if (rootEntry != null) { rootEntry.epoch = this._currentWriteEpoch; rootEntry.fetchTime = Date.now(); } else if (sourceOperation.request.node.params.operationKind === 'query' && this._gcReleaseBufferSize > 0 && this._releaseBuffer.length < this._gcReleaseBufferSize) { // The operation isn't retained but there is space in the release buffer: // temporarily track this operation in case the data can be reused soon. 
var temporaryRootEntry = { operation: sourceOperation, refCount: 0, epoch: this._currentWriteEpoch, fetchTime: Date.now() }; this._releaseBuffer.push(id); this._roots.set(id, temporaryRootEntry); } } return updatedOwners; }; _proto.publish = function publish(source, idsMarkedForInvalidation) { var _this$_optimisticSour3; var target = (_this$_optimisticSour3 = this._optimisticSource) !== null && _this$_optimisticSour3 !== void 0 ? _this$_optimisticSour3 : this._recordSource; updateTargetFromSource(target, source, // We increment the current epoch at the end of the set of updates, // in notify(). Here, we pass what will be the incremented value of // the epoch to use to write to invalidated records. this._currentWriteEpoch + 1, idsMarkedForInvalidation, this._updatedRecordIDs, this._invalidatedRecordIDs); }; _proto.subscribe = function subscribe(snapshot, callback) { var _this3 = this; var subscription = { backup: null, callback: callback, snapshot: snapshot, stale: false }; var dispose = function dispose() { _this3._subscriptions["delete"](subscription); }; this._subscriptions.add(subscription); return { dispose: dispose }; }; _proto.holdGC = function holdGC() { var _this4 = this; this._gcHoldCounter++; var dispose = function dispose() { if (_this4._gcHoldCounter > 0) { _this4._gcHoldCounter--; if (_this4._gcHoldCounter === 0 && _this4._shouldScheduleGC) { _this4._scheduleGC(); _this4._shouldScheduleGC = false; } } }; return { dispose: dispose }; }; _proto.toJSON = function toJSON() { return 'RelayModernStore()'; } // Internal API ; _proto.__getUpdatedRecordIDs = function __getUpdatedRecordIDs() { return this._updatedRecordIDs; } // Returns the owner (RequestDescriptor) if the subscription was affected by the // latest update, or null if it was not affected. 
; _proto._updateSubscription = function _updateSubscription(source, subscription) { var backup = subscription.backup, callback = subscription.callback, snapshot = subscription.snapshot, stale = subscription.stale; var hasOverlappingUpdates = hasOverlappingIDs(snapshot.seenRecords, this._updatedRecordIDs); if (!stale && !hasOverlappingUpdates) { return; } var nextSnapshot = hasOverlappingUpdates || !backup ? RelayReader.read(source, snapshot.selector) : backup; var nextData = recycleNodesInto(snapshot.data, nextSnapshot.data); nextSnapshot = { data: nextData, isMissingData: nextSnapshot.isMissingData, seenRecords: nextSnapshot.seenRecords, selector: nextSnapshot.selector }; if (process.env.NODE_ENV !== "production") { deepFreeze(nextSnapshot); } subscription.snapshot = nextSnapshot; subscription.stale = false; if (nextSnapshot.data !== snapshot.data) { callback(nextSnapshot); return snapshot.selector.owner; } }; _proto.lookupInvalidationState = function lookupInvalidationState(dataIDs) { var _this5 = this; var invalidations = new Map(); dataIDs.forEach(function (dataID) { var _RelayModernRecord$ge; var record = _this5.getSource().get(dataID); invalidations.set(dataID, (_RelayModernRecord$ge = RelayModernRecord.getInvalidationEpoch(record)) !== null && _RelayModernRecord$ge !== void 0 ? _RelayModernRecord$ge : null); }); invalidations.set('global', this._globalInvalidationEpoch); return { dataIDs: dataIDs, invalidations: invalidations }; }; _proto.checkInvalidationState = function checkInvalidationState(prevInvalidationState) { var latestInvalidationState = this.lookupInvalidationState(prevInvalidationState.dataIDs); var currentInvalidations = latestInvalidationState.invalidations; var prevInvalidations = prevInvalidationState.invalidations; // Check if global invalidation has changed if (currentInvalidations.get('global') !== prevInvalidations.get('global')) { return true; } // Check if the invalidation state for any of the ids has changed. 
var _iterator = _createForOfIteratorHelper(prevInvalidationState.dataIDs), _step; try { for (_iterator.s(); !(_step = _iterator.n()).done;) { var dataID = _step.value; if (currentInvalidations.get(dataID) !== prevInvalidations.get(dataID)) { return true; } } } catch (err) { _iterator.e(err); } finally { _iterator.f(); } return false; }; _proto.subscribeToInvalidationState = function subscribeToInvalidationState(invalidationState, callback) { var _this6 = this; var subscription = { callback: callback, invalidationState: invalidationState }; var dispose = function dispose() { _this6._invalidationSubscriptions["delete"](subscription); }; this._invalidationSubscriptions.add(subscription); return { dispose: dispose }; }; _proto._updateInvalidationSubscription = function _updateInvalidationSubscription(subscription, invalidatedStore) { var _this7 = this; var callback = subscription.callback, invalidationState = subscription.invalidationState; var dataIDs = invalidationState.dataIDs; var isSubscribedToInvalidatedIDs = invalidatedStore || dataIDs.some(function (dataID) { return _this7._invalidatedRecordIDs.has(dataID); }); if (!isSubscribedToInvalidatedIDs) { return; } callback(); }; _proto.snapshot = function snapshot() { var _this8 = this; !(this._optimisticSource == null) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernStore: Unexpected call to snapshot() while a previous ' + 'snapshot exists.') : invariant(false) : void 0; this._subscriptions.forEach(function (subscription) { // Backup occurs after writing a new "final" payload(s) and before (re)applying // optimistic changes. Each subscription's `snapshot` represents what was *last // published to the subscriber*, which notably may include previous optimistic // updates. Therefore a subscription can be in any of the following states: // - stale=true: This subscription was restored to a different value than // `snapshot`. 
That means this subscription has changes relative to its base, // but its base has changed (we just applied a final payload): recompute // a backup so that we can later restore to the state the subscription // should be in. // - stale=false: This subscription was restored to the same value than // `snapshot`. That means this subscription does *not* have changes relative // to its base, so the current `snapshot` is valid to use as a backup. if (!subscription.stale) { subscription.backup = subscription.snapshot; return; } var snapshot = subscription.snapshot; var backup = RelayReader.read(_this8.getSource(), snapshot.selector); var nextData = recycleNodesInto(snapshot.data, backup.data); backup.data = nextData; // backup owns the snapshot and can safely mutate subscription.backup = backup; }); this._optimisticSource = RelayOptimisticRecordSource.create(this.getSource()); }; _proto.restore = function restore() { !(this._optimisticSource != null) ? process.env.NODE_ENV !== "production" ? invariant(false, 'RelayModernStore: Unexpected call to restore(), expected a snapshot ' + 'to exist (make sure to call snapshot()).') : invariant(false) : void 0; this._optimisticSource = null; this._subscriptions.forEach(function (subscription) { var backup = subscription.backup; subscription.backup = null; if (backup) { if (backup.data !== subscription.snapshot.data) { subscription.stale = true; } subscription.snapshot = { data: subscription.snapshot.data, isMissingData: backup.isMissingData, seenRecords: backup.seenRecords, selector: backup.selector }; } else { subscription.stale = true; } }); }; _proto._scheduleGC = function _scheduleGC() { var _this9 = this; if (this._gcHoldCounter > 0) { this._shouldScheduleGC = true; return; } if (this._hasScheduledGC) { return; } this._hasScheduledGC = true; this._gcScheduler(function () { _this9.__gc(); _this9._hasScheduledGC = false; }); }; _proto.__gc = function __gc() { var _this10 = this; // Don't run GC while there are optimistic updates 
applied if (this._optimisticSource != null) { return; } var references = new Set(); // Mark all records that are traversable from a root this._roots.forEach(function (_ref) { var operation = _ref.operation; var selector = operation.root; RelayReferenceMarker.mark(_this10._recordSource, selector, references, _this10._operationLoader); }); if (references.size === 0) { // Short-circuit if *nothing* is referenced this._recordSource.clear(); } else { // Evict any unreferenced nodes var storeIDs = this._recordSource.getRecordIDs(); for (var ii = 0; ii < storeIDs.length; ii++) { var dataID = storeIDs[ii]; if (!references.has(dataID)) { this._recordSource.remove(dataID); } } } }; return RelayModernStore; }(); function initializeRecordSource(target) { if (!target.has(ROOT_ID)) { var rootRecord = RelayModernRecord.create(ROOT_ID, ROOT_TYPE); target.set(ROOT_ID, rootRecord); } } /** * Updates the target with information from source, also updating a mapping of * which records in the target were changed as a result. * Additionally, will marc records as invalidated at the current write epoch * given the set of record ids marked as stale in this update. */ function updateTargetFromSource(target, source, currentWriteEpoch, idsMarkedForInvalidation, updatedRecordIDs, invalidatedRecordIDs) { // First, update any records that were marked for invalidation. // For each provided dataID that was invalidated, we write the // INVALIDATED_AT_KEY on the record, indicating // the epoch at which the record was invalidated. if (idsMarkedForInvalidation) { idsMarkedForInvalidation.forEach(function (dataID) { var targetRecord = target.get(dataID); var sourceRecord = source.get(dataID); // If record was deleted during the update (and also invalidated), // we don't need to count it as an invalidated id if (sourceRecord === null) { return; } var nextRecord; if (targetRecord != null) { // If the target record exists, use it to set the epoch // at which it was invalidated. 
This record will be updated with // any changes from source in the section below // where we update the target records based on the source. nextRecord = RelayModernRecord.clone(targetRecord); } else { // If the target record doesn't exist, it means that a new record // in the source was created (and also invalidated), so we use that // record to set the epoch at which it was invalidated. This record // will be updated with any changes from source in the section below // where we update the target records based on the source. nextRecord = sourceRecord != null ? RelayModernRecord.clone(sourceRecord) : null; } if (!nextRecord) { return; } RelayModernRecord.setValue(nextRecord, RelayStoreUtils.INVALIDATED_AT_KEY, currentWriteEpoch); invalidatedRecordIDs.add(dataID); target.set(dataID, nextRecord); }); } // Update the target based on the changes present in source var dataIDs = source.getRecordIDs(); for (var ii = 0; ii < dataIDs.length; ii++) { var dataID = dataIDs[ii]; var sourceRecord = source.get(dataID); var targetRecord = target.get(dataID); // Prevent mutation of a record from outside the store. if (process.env.NODE_ENV !== "production") { if (sourceRecord) { RelayModernRecord.freeze(sourceRecord); } } if (sourceRecord && targetRecord) { var nextRecord = RelayModernRecord.update(targetRecord, sourceRecord); if (nextRecord !== targetRecord) { // Prevent mutation of a record from outside the store. if (process.env.NODE_ENV !== "production") { RelayModernRecord.freeze(nextRecord); } updatedRecordIDs[dataID] = true; target.set(dataID, nextRecord); } } else if (sourceRecord === null) { target["delete"](dataID); if (targetRecord !== null) { updatedRecordIDs[dataID] = true; } } else if (sourceRecord) { target.set(dataID, sourceRecord); updatedRecordIDs[dataID] = true; } // don't add explicit undefined } } /** * Returns an OperationAvailability given the Availability returned * by checking an operation, and when that operation was last written to the store. 
* Specifically, the provided Availability of an operation will contain the * value of when a record referenced by the operation was most recently * invalidated; given that value, and given when this operation was last * written to the store, this function will return the overall * OperationAvailability for the operation. */ function getAvailabilityStatus(operationAvailability, operationLastWrittenAt, operationFetchTime, queryCacheExpirationTime) { var mostRecentlyInvalidatedAt = operationAvailability.mostRecentlyInvalidatedAt, status = operationAvailability.status; if (typeof mostRecentlyInvalidatedAt === 'number') { // If some record referenced by this operation is stale, then the operation itself is stale // if either the operation itself was never written *or* the operation was last written // before the most recent invalidation of its reachable records. if (operationLastWrittenAt == null || mostRecentlyInvalidatedAt > operationLastWrittenAt) { return { status: 'stale' }; } } if (status === 'missing') { return { status: 'missing' }; } if (operationFetchTime != null && queryCacheExpirationTime != null) { var isStale = operationFetchTime <= Date.now() - queryCacheExpirationTime; if (isStale) { return { status: 'stale' }; } } // There were no invalidations of any reachable records *or* the operation is known to have // been fetched after the most recent record invalidation. return { status: 'available', fetchTime: operationFetchTime !== null && operationFetchTime !== void 0 ? operationFetchTime : null }; } RelayProfiler.instrumentMethods(RelayModernStore.prototype, { lookup: 'RelayModernStore.prototype.lookup', notify: 'RelayModernStore.prototype.notify', publish: 'RelayModernStore.prototype.publish', __gc: 'RelayModernStore.prototype.__gc' }); module.exports = RelayModernStore;
_createForOfIteratorHelper
convert.go
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package walk import ( "encoding/binary" "go/constant" "cmd/compile/internal/base" "cmd/compile/internal/ir" "cmd/compile/internal/reflectdata" "cmd/compile/internal/ssagen" "cmd/compile/internal/typecheck" "cmd/compile/internal/types" "cmd/internal/sys" ) // walkConv walks an OCONV or OCONVNOP (but not OCONVIFACE) node. func walkConv(n *ir.ConvExpr, init *ir.Nodes) ir.Node { n.X = walkExpr(n.X, init) if n.Op() == ir.OCONVNOP && n.Type() == n.X.Type() { return n.X } if n.Op() == ir.OCONVNOP && ir.ShouldCheckPtr(ir.CurFunc, 1) { if n.Type().IsPtr() && n.X.Type().IsUnsafePtr() { // unsafe.Pointer to *T return walkCheckPtrAlignment(n, init, nil) } if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() { // uintptr to unsafe.Pointer return walkCheckPtrArithmetic(n, init) } } param, result := rtconvfn(n.X.Type(), n.Type()) if param == types.Txxx { return n } fn := types.BasicTypeNames[param] + "to" + types.BasicTypeNames[result] return typecheck.Conv(mkcall(fn, types.Types[result], init, typecheck.Conv(n.X, types.Types[param])), n.Type()) } // walkConvInterface walks an OCONVIFACE node. 
func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node { n.X = walkExpr(n.X, init) fromType := n.X.Type() toType := n.Type() if !fromType.IsInterface() && !ir.IsBlank(ir.CurFunc.Nname) { // skip unnamed functions (func _()) reflectdata.MarkTypeUsedInInterface(fromType, ir.CurFunc.LSym) } if !fromType.IsInterface() { var typeWord ir.Node if toType.IsEmptyInterface() { typeWord = reflectdata.TypePtr(fromType) } else { typeWord = reflectdata.ITabAddr(fromType, toType) } l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeWord, dataWord(n.X, init, n.Esc() != ir.EscNone)) l.SetType(toType) l.SetTypecheck(n.Typecheck()) return l } if fromType.IsEmptyInterface() { base.Fatalf("OCONVIFACE can't operate on an empty interface") } // Evaluate the input interface. c := typecheck.Temp(fromType) init.Append(ir.NewAssignStmt(base.Pos, c, n.X)) // Grab its parts. itab := ir.NewUnaryExpr(base.Pos, ir.OITAB, c) itab.SetType(types.Types[types.TUINTPTR].PtrTo()) itab.SetTypecheck(1) data := ir.NewUnaryExpr(base.Pos, ir.OIDATA, c) data.SetType(types.Types[types.TUINT8].PtrTo()) // Type is generic pointer - we're just passing it through. data.SetTypecheck(1) var typeWord ir.Node if toType.IsEmptyInterface() { // Implement interface to empty interface conversion. // res = itab // if res != nil { // res = res.type // } typeWord = typecheck.Temp(types.NewPtr(types.Types[types.TUINT8])) init.Append(ir.NewAssignStmt(base.Pos, typeWord, itab)) nif := ir.NewIfStmt(base.Pos, typecheck.Expr(ir.NewBinaryExpr(base.Pos, ir.ONE, typeWord, typecheck.NodNil())), nil, nil) nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, typeWord, itabType(typeWord))} init.Append(nif) } else { // Must be converting I2I (more specific to less specific interface). 
// res = convI2I(toType, itab) fn := typecheck.LookupRuntime("convI2I") types.CalcSize(fn.Type()) call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil) call.Args = []ir.Node{reflectdata.TypePtr(toType), itab} typeWord = walkExpr(typecheck.Expr(call), init) } // Build the result. // e = iface{typeWord, data} e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeWord, data) e.SetType(toType) // assign type manually, typecheck doesn't understand OEFACE. e.SetTypecheck(1) return e } // Returns the data word (the second word) used to represent n in an interface. // n must not be of interface type. // esc describes whether the result escapes. func
(n ir.Node, init *ir.Nodes, escapes bool) ir.Node { fromType := n.Type() // If it's a pointer, it is its own representation. if types.IsDirectIface(fromType) { return n } // Try a bunch of cases to avoid an allocation. var value ir.Node switch { case fromType.Size() == 0: // n is zero-sized. Use zerobase. cheapExpr(n, init) // Evaluate n for side-effects. See issue 19246. value = ir.NewLinksymExpr(base.Pos, ir.Syms.Zerobase, types.Types[types.TUINTPTR]) case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()): // n is a bool/byte. Use staticuint64s[n * 8] on little-endian // and staticuint64s[n * 8 + 7] on big-endian. n = cheapExpr(n, init) // byteindex widens n so that the multiplication doesn't overflow. index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n), ir.NewInt(3)) if ssagen.Arch.LinkArch.ByteOrder == binary.BigEndian { index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, ir.NewInt(7)) } // The actual type is [256]uint64, but we use [256*8]uint8 so we can address // individual bytes. staticuint64s := ir.NewLinksymExpr(base.Pos, ir.Syms.Staticuint64s, types.NewArray(types.Types[types.TUINT8], 256*8)) xe := ir.NewIndexExpr(base.Pos, staticuint64s, index) xe.SetBounded(true) value = xe case n.Op() == ir.ONAME && n.(*ir.Name).Class == ir.PEXTERN && n.(*ir.Name).Readonly(): // n is a readonly global; use it directly. value = n case !escapes && fromType.Width <= 1024: // n does not escape. Use a stack temporary initialized to n. value = typecheck.Temp(fromType) init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, value, n))) } if value != nil { // The interface data word is &value. return typecheck.Expr(typecheck.NodAddr(value)) } // Time to do an allocation. We'll call into the runtime for that. fnname, argType, needsaddr := dataWordFuncName(fromType) fn := typecheck.LookupRuntime(fnname) var args []ir.Node if needsaddr { // Types of large or unknown size are passed by reference. 
// Orderexpr arranged for n to be a temporary for all // the conversions it could see. Comparison of an interface // with a non-interface, especially in a switch on interface value // with non-interface cases, is not visible to order.stmt, so we // have to fall back on allocating a temp here. if !ir.IsAddressable(n) { n = copyExpr(n, fromType, init) } fn = typecheck.SubstArgTypes(fn, fromType) args = []ir.Node{reflectdata.TypePtr(fromType), typecheck.NodAddr(n)} } else { // Use a specialized conversion routine that takes the type being // converted by value, not by pointer. var arg ir.Node switch { case fromType == argType: // already in the right type, nothing to do arg = n case fromType.Kind() == argType.Kind(), fromType.IsPtrShaped() && argType.IsPtrShaped(): // can directly convert (e.g. named type to underlying type, or one pointer to another) // TODO: never happens because pointers are directIface? arg = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, argType, n) case fromType.IsInteger() && argType.IsInteger(): // can directly convert (e.g. int32 to uint32) arg = ir.NewConvExpr(n.Pos(), ir.OCONV, argType, n) default: // unsafe cast through memory arg = copyExpr(n, fromType, init) var addr ir.Node = typecheck.NodAddr(arg) addr = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, argType.PtrTo(), addr) arg = ir.NewStarExpr(n.Pos(), addr) arg.SetType(argType) } args = []ir.Node{arg} } call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil) call.Args = args return safeExpr(walkExpr(typecheck.Expr(call), init), init) } // walkConvIData walks an OCONVIDATA node. func walkConvIData(n *ir.ConvExpr, init *ir.Nodes) ir.Node { n.X = walkExpr(n.X, init) return dataWord(n.X, init, n.Esc() != ir.EscNone) } // walkBytesRunesToString walks an OBYTES2STR or ORUNES2STR node. func walkBytesRunesToString(n *ir.ConvExpr, init *ir.Nodes) ir.Node { a := typecheck.NodNil() if n.Esc() == ir.EscNone { // Create temporary buffer for string on stack. 
a = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8]) } if n.Op() == ir.ORUNES2STR { // slicerunetostring(*[32]byte, []rune) string return mkcall("slicerunetostring", n.Type(), init, a, n.X) } // slicebytetostring(*[32]byte, ptr *byte, n int) string n.X = cheapExpr(n.X, init) ptr, len := backingArrayPtrLen(n.X) return mkcall("slicebytetostring", n.Type(), init, a, ptr, len) } // walkBytesToStringTemp walks an OBYTES2STRTMP node. func walkBytesToStringTemp(n *ir.ConvExpr, init *ir.Nodes) ir.Node { n.X = walkExpr(n.X, init) if !base.Flag.Cfg.Instrumenting { // Let the backend handle OBYTES2STRTMP directly // to avoid a function call to slicebytetostringtmp. return n } // slicebytetostringtmp(ptr *byte, n int) string n.X = cheapExpr(n.X, init) ptr, len := backingArrayPtrLen(n.X) return mkcall("slicebytetostringtmp", n.Type(), init, ptr, len) } // walkRuneToString walks an ORUNESTR node. func walkRuneToString(n *ir.ConvExpr, init *ir.Nodes) ir.Node { a := typecheck.NodNil() if n.Esc() == ir.EscNone { a = stackBufAddr(4, types.Types[types.TUINT8]) } // intstring(*[4]byte, rune) return mkcall("intstring", n.Type(), init, a, typecheck.Conv(n.X, types.Types[types.TINT64])) } // walkStringToBytes walks an OSTR2BYTES node. func walkStringToBytes(n *ir.ConvExpr, init *ir.Nodes) ir.Node { s := n.X if ir.IsConst(s, constant.String) { sc := ir.StringVal(s) // Allocate a [n]byte of the right size. t := types.NewArray(types.Types[types.TUINT8], int64(len(sc))) var a ir.Node if n.Esc() == ir.EscNone && len(sc) <= int(ir.MaxImplicitStackVarSize) { a = stackBufAddr(t.NumElem(), t.Elem()) } else { types.CalcSize(t) a = ir.NewUnaryExpr(base.Pos, ir.ONEW, nil) a.SetType(types.NewPtr(t)) a.SetTypecheck(1) a.MarkNonNil() } p := typecheck.Temp(t.PtrTo()) // *[n]byte init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, p, a))) // Copy from the static string data to the [n]byte. 
if len(sc) > 0 { as := ir.NewAssignStmt(base.Pos, ir.NewStarExpr(base.Pos, p), ir.NewStarExpr(base.Pos, typecheck.ConvNop(ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), t.PtrTo()))) appendWalkStmt(init, as) } // Slice the [n]byte to a []byte. slice := ir.NewSliceExpr(n.Pos(), ir.OSLICEARR, p, nil, nil, nil) slice.SetType(n.Type()) slice.SetTypecheck(1) return walkExpr(slice, init) } a := typecheck.NodNil() if n.Esc() == ir.EscNone { // Create temporary buffer for slice on stack. a = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8]) } // stringtoslicebyte(*32[byte], string) []byte return mkcall("stringtoslicebyte", n.Type(), init, a, typecheck.Conv(s, types.Types[types.TSTRING])) } // walkStringToBytesTemp walks an OSTR2BYTESTMP node. func walkStringToBytesTemp(n *ir.ConvExpr, init *ir.Nodes) ir.Node { // []byte(string) conversion that creates a slice // referring to the actual string bytes. // This conversion is handled later by the backend and // is only for use by internal compiler optimizations // that know that the slice won't be mutated. // The only such case today is: // for i, c := range []byte(string) n.X = walkExpr(n.X, init) return n } // walkStringToRunes walks an OSTR2RUNES node. func walkStringToRunes(n *ir.ConvExpr, init *ir.Nodes) ir.Node { a := typecheck.NodNil() if n.Esc() == ir.EscNone { // Create temporary buffer for slice on stack. a = stackBufAddr(tmpstringbufsize, types.Types[types.TINT32]) } // stringtoslicerune(*[32]rune, string) []rune return mkcall("stringtoslicerune", n.Type(), init, a, typecheck.Conv(n.X, types.Types[types.TSTRING])) } // dataWordFuncName returns the name of the function used to convert a value of type "from" // to the data word of an interface. // argType is the type the argument needs to be coerced to. // needsaddr reports whether the value should be passed (needaddr==false) or its address (needsaddr==true). 
func dataWordFuncName(from *types.Type) (fnname string, argType *types.Type, needsaddr bool) { if from.IsInterface() { base.Fatalf("can only handle non-interfaces") } switch { case from.Size() == 2 && from.Align == 2: return "convT16", types.Types[types.TUINT16], false case from.Size() == 4 && from.Align == 4 && !from.HasPointers(): return "convT32", types.Types[types.TUINT32], false case from.Size() == 8 && from.Align == types.Types[types.TUINT64].Align && !from.HasPointers(): return "convT64", types.Types[types.TUINT64], false } if sc := from.SoleComponent(); sc != nil { switch { case sc.IsString(): return "convTstring", types.Types[types.TSTRING], false case sc.IsSlice(): return "convTslice", types.NewSlice(types.Types[types.TUINT8]), false // the element type doesn't matter } } if from.HasPointers() { return "convT", types.Types[types.TUNSAFEPTR], true } return "convTnoptr", types.Types[types.TUNSAFEPTR], true } // rtconvfn returns the parameter and result types that will be used by a // runtime function to convert from type src to type dst. The runtime function // name can be derived from the names of the returned types. // // If no such function is necessary, it returns (Txxx, Txxx). 
func rtconvfn(src, dst *types.Type) (param, result types.Kind) { if ssagen.Arch.SoftFloat { return types.Txxx, types.Txxx } switch ssagen.Arch.LinkArch.Family { case sys.ARM, sys.MIPS: if src.IsFloat() { switch dst.Kind() { case types.TINT64, types.TUINT64: return types.TFLOAT64, dst.Kind() } } if dst.IsFloat() { switch src.Kind() { case types.TINT64, types.TUINT64: return src.Kind(), types.TFLOAT64 } } case sys.I386: if src.IsFloat() { switch dst.Kind() { case types.TINT64, types.TUINT64: return types.TFLOAT64, dst.Kind() case types.TUINT32, types.TUINT, types.TUINTPTR: return types.TFLOAT64, types.TUINT32 } } if dst.IsFloat() { switch src.Kind() { case types.TINT64, types.TUINT64: return src.Kind(), types.TFLOAT64 case types.TUINT32, types.TUINT, types.TUINTPTR: return types.TUINT32, types.TFLOAT64 } } } return types.Txxx, types.Txxx } // byteindex converts n, which is byte-sized, to an int used to index into an array. // We cannot use conv, because we allow converting bool to int here, // which is forbidden in user code. func byteindex(n ir.Node) ir.Node { // We cannot convert from bool to int directly. // While converting from int8 to int is possible, it would yield // the wrong result for negative values. // Reinterpreting the value as an unsigned byte solves both cases. 
if !types.Identical(n.Type(), types.Types[types.TUINT8]) { n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n) n.SetType(types.Types[types.TUINT8]) n.SetTypecheck(1) } n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n) n.SetType(types.Types[types.TINT]) n.SetTypecheck(1) return n } func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, se *ir.SliceExpr) ir.Node { if !n.Type().IsPtr() { base.Fatalf("expected pointer type: %v", n.Type()) } elem := n.Type().Elem() var count ir.Node if se != nil { count = se.Max } if count != nil { if !elem.IsArray() { base.Fatalf("expected array type: %v", elem) } elem = elem.Elem() } size := elem.Size() if elem.Alignment() == 1 && (size == 0 || size == 1 && count == nil) { return n } if count == nil { count = ir.NewInt(1) } n.X = cheapExpr(n.X, init) checkPtrCall := mkcall("checkptrAlignment", nil, init, typecheck.ConvNop(n.X, types.Types[types.TUNSAFEPTR]), reflectdata.TypePtr(elem), typecheck.Conv(count, types.Types[types.TUINTPTR])) if se != nil { se.CheckPtrCall = checkPtrCall } else { init.Append(checkPtrCall) } return n } func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node { // Calling cheapExpr(n, init) below leads to a recursive call to // walkExpr, which leads us back here again. Use n.Checkptr to // prevent infinite loops. if n.CheckPtr() { return n } n.SetCheckPtr(true) defer n.SetCheckPtr(false) // TODO(mdempsky): Make stricter. We only need to exempt // reflect.Value.Pointer and reflect.Value.UnsafeAddr. switch n.X.Op() { case ir.OCALLMETH: base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck") case ir.OCALLFUNC, ir.OCALLINTER: return n } if n.X.Op() == ir.ODOTPTR && ir.IsReflectHeaderDataField(n.X) { return n } // Find original unsafe.Pointer operands involved in this // arithmetic expression. // // "It is valid both to add and to subtract offsets from a // pointer in this way. It is also valid to use &^ to round // pointers, usually for alignment." 
var originals []ir.Node var walk func(n ir.Node) walk = func(n ir.Node) { switch n.Op() { case ir.OADD: n := n.(*ir.BinaryExpr) walk(n.X) walk(n.Y) case ir.OSUB, ir.OANDNOT: n := n.(*ir.BinaryExpr) walk(n.X) case ir.OCONVNOP: n := n.(*ir.ConvExpr) if n.X.Type().IsUnsafePtr() { n.X = cheapExpr(n.X, init) originals = append(originals, typecheck.ConvNop(n.X, types.Types[types.TUNSAFEPTR])) } } } walk(n.X) cheap := cheapExpr(n, init) slice := typecheck.MakeDotArgs(base.Pos, types.NewSlice(types.Types[types.TUNSAFEPTR]), originals) slice.SetEsc(ir.EscNone) init.Append(mkcall("checkptrArithmetic", nil, init, typecheck.ConvNop(cheap, types.Types[types.TUNSAFEPTR]), slice)) // TODO(khr): Mark backing store of slice as dead. This will allow us to reuse // the backing store for multiple calls to checkptrArithmetic. return cheap }
dataWord
hfilemap.rs
#![allow(non_snake_case)] use crate::{co, kernel}; use crate::kernel::decl::{GetLastError, HFILEMAPVIEW, HIDWORD, LODWORD, WinResult}; use crate::prelude::{Handle, HandleClose}; impl_handle! { HFILEMAP: "kernel"; /// Handle to a /// [file mapping](https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-createfilemappingw). /// Originally just a `HANDLE`. } impl HandleClose for HFILEMAP {} impl KernelHfilemap for HFILEMAP {} /// [`HFILEMAP`](crate::HFILEMAP) methods from `kernel` feature. #[cfg_attr(docsrs, doc(cfg(feature = "kernel")))] pub trait KernelHfilemap: Handle { /// [`MapViewOfFile`](https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-mapviewoffile) /// method. /// /// **Note:** Must be paired with an /// [`HFILEMAPVIEW::UnmapViewOfFile`](crate::prelude::KernelHfilemapview::UnmapViewOfFile) /// call. fn MapViewOfFile(self, desired_access: co::FILE_MAP, offset: u64, number_of_bytes_to_map: Option<i64>) -> WinResult<HFILEMAPVIEW>
}
{ unsafe { kernel::ffi::MapViewOfFile( self.as_ptr(), desired_access.0, HIDWORD(offset), LODWORD(offset), number_of_bytes_to_map.unwrap_or_default(), ).as_mut() }.map(|ptr| HFILEMAPVIEW(ptr)) .ok_or_else(|| GetLastError()) }
server_update_parameters.py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class ServerUpdateParameters(Model):
    """Parameters allowed to update for a server.

    :param sku: The SKU (pricing tier) of the server.
    :type sku: :class:`Sku <azure.mgmt.rdbms.postgresql.models.Sku>`
    :param storage_mb: The max storage allowed for a server.
    :type storage_mb: long
    :param administrator_login_password: The password of the administrator
     login.
    :type administrator_login_password: str
    :param version: The version of a server. Possible values include: '9.5',
     '9.6'
    :type version: str or :class:`ServerVersion
     <azure.mgmt.rdbms.postgresql.models.ServerVersion>`
    :param ssl_enforcement: Enable ssl enforcement or not when connect to
     server. Possible values include: 'Enabled', 'Disabled'
    :type ssl_enforcement: str or :class:`SslEnforcementEnum
     <azure.mgmt.rdbms.postgresql.models.SslEnforcementEnum>`
    :param tags: Application-specific metadata in the form of key-value
     pairs.
    :type tags: dict
    """

    # Client-side constraints enforced by msrest during serialization.
    _validation = {
        'storage_mb': {'minimum': 1024},
    }

    # Maps each Python attribute to its wire-format JSON path and msrest type.
    _attribute_map = {
        'sku': {'key': 'sku', 'type': 'Sku'},
        'storage_mb': {'key': 'properties.storageMB', 'type': 'long'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'ssl_enforcement': {'key': 'properties.sslEnforcement', 'type': 'SslEnforcementEnum'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, sku=None, storage_mb=None, administrator_login_password=None, version=None, ssl_enforcement=None, tags=None):
        # All parameters are optional; fields left as None are skipped when
        # the update request is serialized.
        self.sku = sku
        self.storage_mb = storage_mb
        self.administrator_login_password = administrator_login_password
        self.version = version
        self.ssl_enforcement = ssl_enforcement
        self.tags = tags
authz.go
package authorization import ( "bufio" "bytes" "fmt" "io" "net/http" "strings" "github.com/Sirupsen/logrus" "github.com/docker/docker/pkg/ioutils" ) const maxBodySize = 1048576 // 1MB // NewCtx creates new authZ context, it is used to store authorization information related to a specific docker // REST http session // A context provides two method: // Authenticate Request: // Call authZ plugins with current REST request and AuthN response // Request contains full HTTP packet sent to the docker daemon // https://docs.docker.com/reference/api/docker_remote_api/ // // Authenticate Response: // Call authZ plugins with full info about current REST request, REST response and AuthN response // The response from this method may contains content that overrides the daemon response // This allows authZ plugins to filter privileged content // // If multiple authZ plugins are specified, the block/allow decision is based on ANDing all plugin results // For response manipulation, the response from each plugin is piped between plugins. 
Plugin execution order
// is determined according to daemon parameters
func NewCtx(authZPlugins []Plugin, user, userAuthNMethod, requestMethod, requestURI string) *Ctx {
	return &Ctx{
		plugins:         authZPlugins,
		user:            user,
		userAuthNMethod: userAuthNMethod,
		requestMethod:   requestMethod,
		requestURI:      requestURI,
	}
}

// Ctx stores a single request-response interaction context
type Ctx struct {
	user            string
	userAuthNMethod string
	requestMethod   string
	requestURI      string
	plugins         []Plugin
	// authReq stores the cached request object for the current transaction
	authReq *Request
}

// AuthZRequest authorized the request to the docker daemon using authZ plugins
func (ctx *Ctx) AuthZRequest(w http.ResponseWriter, r *http.Request) error {
	var body []byte
	// Only buffer the body when it is relevant to plugins and small enough
	// to cache without unbounded memory use.
	if sendBody(ctx.requestURI, r.Header) && r.ContentLength > 0 && r.ContentLength < maxBodySize {
		var err error
		body, r.Body, err = drainBody(r.Body)
		if err != nil {
			return err
		}
	}

	var h bytes.Buffer
	if err := r.Header.Write(&h); err != nil {
		return err
	}

	ctx.authReq = &Request{
		User:            ctx.user,
		UserAuthNMethod: ctx.userAuthNMethod,
		RequestMethod:   ctx.requestMethod,
		RequestURI:      ctx.requestURI,
		RequestBody:     body,
		RequestHeaders:  headers(r.Header),
	}

	// ANDed decision: the first plugin that denies (or errors) aborts the request.
	for _, plugin := range ctx.plugins {
		logrus.Debugf("AuthZ request using plugin %s", plugin.Name())

		authRes, err := plugin.AuthZRequest(ctx.authReq)
		if err != nil {
			return fmt.Errorf("plugin %s failed with error: %s", plugin.Name(), err)
		}

		if !authRes.Allow {
			return fmt.Errorf("authorization denied by plugin %s: %s", plugin.Name(), authRes.Msg)
		}
	}

	return nil
}

// AuthZResponse authorized and manipulates the response from docker daemon using authZ plugins
func (ctx *Ctx) AuthZResponse(rm ResponseModifier, r *http.Request) error {
	ctx.authReq.ResponseStatusCode = rm.StatusCode()
	ctx.authReq.ResponseHeaders = headers(rm.Header())

	if sendBody(ctx.requestURI, rm.Header()) {
		ctx.authReq.ResponseBody = rm.RawBody()
	}

	for _, plugin := range ctx.plugins {
		logrus.Debugf("AuthZ response using plugin %s", plugin.Name())

		authRes, err := plugin.AuthZResponse(ctx.authReq)
		if err != nil {
			return fmt.Errorf("plugin %s failed with error: %s", plugin.Name(), err)
		}

		if !authRes.Allow {
			return fmt.Errorf("authorization denied by plugin %s: %s", plugin.Name(), authRes.Msg)
		}
	}

	// Flush any buffered (possibly plugin-modified) response to the client.
	rm.FlushAll()

	return nil
}

// drainBody dump the body (if it's length is less than 1MB) without modifying the request state
func drainBody(body io.ReadCloser) ([]byte, io.ReadCloser, error) {
	bufReader := bufio.NewReaderSize(body, maxBodySize)
	// Wrap the buffered reader so that already-peeked bytes are replayed to
	// the daemon, and Close still closes the underlying body.
	newBody := ioutils.NewReadCloserWrapper(bufReader, func() error { return body.Close() })

	data, err := bufReader.Peek(maxBodySize)
	// Body size exceeds max body size
	if err == nil {
		logrus.Warnf("Request body is larger than: '%d' skipping body", maxBodySize)
		return nil, newBody, nil
	}
	// Body size is less than maximum size
	if err == io.EOF {
		return data, newBody, nil
	}
	// Unknown error
	return nil, newBody, err
}

// sendBody returns true when request/response body should be sent to AuthZPlugin
func sendBody(url string, header http.Header) bool {
	// Skip body for auth endpoint
	if strings.HasSuffix(url, "/auth") {
		return false
	}

	// body is sent only for text or json messages
	return header.Get("Content-Type") == "application/json"
}

// headers returns flatten version of the http headers excluding authorization
func headers(header http.Header) map[string]string {
	v := make(map[string]string)
	for k, values := range header {
		// Skip authorization headers
		if strings.EqualFold(k, "Authorization") || strings.EqualFold(k, "X-Registry-Config") || strings.EqualFold(k, "X-Registry-Auth") {
			continue
		}
		// Multi-valued headers collapse to the last value.
		for _, val := range values {
			v[k] = val
		}
	}
	return v
}
test_master_utils.py
import debile.master.utils as utils

import unittest
import mock


class MasterUtilsTestCase(unittest.TestCase):
    """Unit tests for the debile.master.utils configuration helpers."""

    def test_init_config(self):
        """_init_config loads the YAML file and exposes the expected sections."""
        config = utils._init_config('tests/resources/master.yaml')
        self.assertEqual(config['database'],
                         'sqlite:////srv/debile/debile.db')
        self.assertIsNotNone(config['fedmsg'])
        self.assertIsNotNone(config['xmlrpc'])
        self.assertIsNotNone(config['repo'])
        self.assertIsNotNone(config['keyrings'])

    @mock.patch('debile.master.utils.create_engine')
    @mock.patch('debile.master.utils.Session.configure')
    def test_init_sqlalchemy(self, mock_configure, mock_engine):
        """_init_sqlalchemy creates an engine for the configured database URL
        and binds the session factory to it.

        Note: mock.patch decorators apply bottom-up, so ``mock_configure`` is
        ``Session.configure`` and ``mock_engine`` is ``create_engine``.
        """
        config = {'database': 'sqlite:////srv/debile/debile.db'}

        # The call under test must run before any assertion on the mocks.
        utils._init_sqlalchemy(config)

        self.assertTrue(mock_engine.called)
        self.assertTrue(mock_configure.called)
        args, kwargs = mock_engine.call_args
        self.assertEqual(args, ('sqlite:////srv/debile/debile.db',))
        self.assertFalse(kwargs['implicit_returning'])
collaborators.go
package service

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"time"

	"getsturdy.com/api/pkg/analytics"
	"getsturdy.com/api/pkg/codebase"
	"getsturdy.com/api/pkg/events"
	"getsturdy.com/api/pkg/github/enterprise/client"
	"getsturdy.com/api/pkg/notification"

	"github.com/google/go-github/v39/github"
	"github.com/google/uuid"
	"go.uber.org/zap"
)

// GrantCollaboratorsAccess adds every GitHub collaborator of the codebase's
// repository that already has a Sturdy account as a member of the codebase,
// sends them an "imported" notification, and enqueues pull-request imports.
func (svc *Service) GrantCollaboratorsAccess(ctx context.Context, codebaseID string, authAsUserID *string) error {
	var didInviteAny bool

	gitHubRepo, err := svc.gitHubRepositoryRepo.GetByCodebaseID(codebaseID)
	if err != nil {
		return fmt.Errorf("failed to get github repo by codebase: %w", err)
	}

	installation, err := svc.gitHubInstallationRepo.GetByInstallationID(gitHubRepo.InstallationID)
	if err != nil {
		return fmt.Errorf("failed to get installation: %w", err)
	}

	repoClient, err := svc.authAsUserOrFallbackAsApp(authAsUserID, installation.InstallationID)
	if err != nil {
		return fmt.Errorf("failed to get a github client: %w", err)
	}

	collaborators, err := listAllCollaborators(ctx, repoClient, installation.Owner, gitHubRepo.Name)
	if err != nil {
		return fmt.Errorf("failed to list collaborators: %w", err)
	}

	for _, collaborator := range collaborators {
		logger := svc.logger.With(
			// Fixed: previously logged the collaborator login under "codebase_id".
			zap.String("codebase_id", codebaseID),
			zap.String("github_login", collaborator.GetLogin()),
		)

		logger.Info("setting up collaborator")

		gitHubUser, err := svc.gitHubUserRepo.GetByUsername(collaborator.GetLogin())
		if err != nil {
			// sql.ErrNoRows just means this collaborator has no Sturdy account yet.
			if !errors.Is(err, sql.ErrNoRows) {
				logger.Error("failed to get github user from db", zap.Error(err))
			}
			continue
		}

		logger = logger.With(zap.String("user_id", gitHubUser.UserID))

		// If the gitHubUser was created within the last hour, this is a new github connection.
		// Only send notifications for old connections that have a new github repo imported.
		createdWithinTheLastHour := gitHubUser.CreatedAt.Add(time.Hour).After(time.Now())
		if !createdWithinTheLastHour {
			if err := svc.notificationSender.User(ctx, gitHubUser.UserID, gitHubRepo.CodebaseID, notification.GitHubRepositoryImported, gitHubRepo.ID); err != nil {
				logger.Error("failed to send github repo imported notification", zap.Error(err))
			} else {
				logger.Info("sent notification about the imported codebase")
			}
		} else {
			logger.Info("github user is too new, skipping sending notification")
		}

		_, err = svc.codebaseUserRepo.GetByUserAndCodebase(gitHubUser.UserID, codebaseID)
		switch {
		case err == nil:
			// The user is already a member (and is likely the user that installed the repo)
			logger.Info("github user is already a member of the codebase")

			// enqueue import pull requests for this user
			if err := svc.EnqueueGitHubPullRequestImport(ctx, codebaseID, gitHubUser.UserID); err != nil {
				logger.Error("failed to add to pr importer queue", zap.Error(err))
			}
		case errors.Is(err, sql.ErrNoRows):
			logger.Info("granting access to repository based on GitHub credentials")

			t0 := time.Now()
			if err := svc.codebaseUserRepo.Create(codebase.CodebaseUser{
				ID:         uuid.NewString(),
				UserID:     gitHubUser.UserID,
				CodebaseID: codebaseID,
				CreatedAt:  &t0,
			}); err != nil {
				logger.Warn("failed to create codebase-user relation in db", zap.Error(err))
				continue
			}

			svc.analyticsService.Capture(ctx, "added user to codebase",
				analytics.CodebaseID(codebaseID),
				analytics.Property("github", true),
				analytics.Property("is_github_sender", false), // This event is not fired for the user that installed the GitHub app
			)

			// enqueue import pull requests for this user
			if err := svc.EnqueueGitHubPullRequestImport(ctx, codebaseID, gitHubUser.UserID); err != nil {
				logger.Error("failed to add to pr importer queue", zap.Error(err))
			}

			didInviteAny = true
		default:
			logger.Error("failed to get codebase-user relation from db", zap.Error(err))
			continue
		}
	}

	if didInviteAny {
		// Send events
		svc.eventsSender.Codebase(codebaseID, events.CodebaseUpdated, codebaseID)
	}

	return nil
}

// authAsUserOrFallbackAsApp returns a repositories client authenticated as
// the given user when possible, otherwise authenticated as the GitHub app.
func (svc *Service) authAsUserOrFallbackAsApp(userID *string, installationID int64) (client.RepositoriesClient, error) {
	// Prefer user auth
	if userID != nil {
		gitHubUser, err := svc.gitHubUserRepo.GetByUserID(*userID)
		// Auth as user if a user could be found
		if err == nil {
			personalClient, err := svc.gitHubPersonalClientProvider(gitHubUser.AccessToken)
			if err != nil {
				return nil, fmt.Errorf("failed to create personal github client: %w", err)
			}
			return personalClient.Repositories, nil
		}
	}

	// Fallback to authenticating as the app, note though that this is a worse option. As requests from the app might not see all users.
	tokenClient, _, err := svc.gitHubInstallationClientProvider(
		svc.gitHubAppConfig,
		installationID,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to create github client: %w", err)
	}
	return tokenClient.Repositories, nil
}

// listAllCollaborators returns a list of collaborators that the authenticated user can _see_.
// Note that the app does not have collaborator access, and will only see users that have a _public_ membership.
// To get the full list of collaborators, authenticate as a user with a confirmed membership (the user that installed the app is a good candidate).
func listAllCollaborators(ctx context.Context, reposClient client.RepositoriesClient, owner, name string) ([]*github.User, error) {
	var users []*github.User
	page := 1
	// GitHub signals the last page with NextPage == 0.
	for page != 0 {
		newUsers, nextPage, err := listCollaborators(ctx, reposClient, owner, name, page)
		if err != nil {
			return nil, err
		}
		page = nextPage
		users = append(users, newUsers...)
	}
	return users, nil
}

// listCollaborators fetches one page of collaborators and returns the next page number.
func listCollaborators(ctx context.Context, reposClient client.RepositoriesClient, owner, name string, page int) ([]*github.User, int, error) {
	users, rsp, err := reposClient.ListCollaborators(ctx, owner, name, &github.ListCollaboratorsOptions{
		Affiliation: "all",
		ListOptions: github.ListOptions{Page: page, PerPage: 50}},
	)
	if err != nil {
		return nil, 0, err
	}
	return users, rsp.NextPage, nil
}

// AddUser grants a single user direct membership of the codebase and emits
// the corresponding analytics and update events.
func (svc *Service) AddUser(ctx context.Context, codebaseID, userID string) error {
	// Add access to this user directly
	t := time.Now()
	err := svc.codebaseUserRepo.Create(codebase.CodebaseUser{
		ID:         uuid.NewString(),
		UserID:     userID,
		CodebaseID: codebaseID,
		CreatedAt:  &t,
	})
	if err != nil {
		return fmt.Errorf("failed to add sender to codebaseUserRepo: %w", err)
	}

	svc.analyticsService.Capture(ctx, "added user to codebase",
		analytics.DistinctID(userID),
		analytics.CodebaseID(codebaseID),
		analytics.Property("github", true),
		analytics.Property("is_github_sender", true),
	)

	svc.logger.Info("adding github sender to the codebase", zap.String("user_id", userID))

	svc.analyticsService.Capture(ctx, "installed github repository",
		analytics.DistinctID(userID),
		analytics.CodebaseID(codebaseID),
		analytics.Property("github", true),
		analytics.Property("is_github_sender", false),
	)

	// Send events
	svc.eventsSender.Codebase(codebaseID, events.CodebaseUpdated, codebaseID)

	return nil
}
testing.ts
// Barrel module: re-exports everything from the user mock module so that
// consumers can import the test doubles from this single "testing" entry point.
export * from "./testing/user.mock";
server_instance.py
# coding: utf-8 """ vserver Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from ncloud_vserver.model.common_code import CommonCode # noqa: F401,E501 class ServerInstance(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'server_instance_no': 'str', 'server_name': 'str', 'server_description': 'str', 'cpu_count': 'int', 'memory_size': 'int', 'platform_type': 'CommonCode', 'login_key_name': 'str', 'public_ip_instance_no': 'str', 'public_ip': 'str', 'server_instance_status': 'CommonCode', 'server_instance_operation': 'CommonCode', 'server_instance_status_name': 'str', 'create_date': 'str', 'uptime': 'str', 'server_image_product_code': 'str', 'server_product_code': 'str', 'is_protect_server_termination': 'bool', 'zone_code': 'str', 'region_code': 'str', 'vpc_no': 'str', 'subnet_no': 'str', 'network_interface_no_list': 'list[str]', 'init_script_no': 'str', 'server_instance_type': 'CommonCode', 'base_block_storage_disk_type': 'CommonCode', 'base_block_storage_disk_detail_type': 'CommonCode', 'placement_group_no': 'str' } attribute_map = { 'server_instance_no': 'serverInstanceNo', 'server_name': 'serverName', 'server_description': 'serverDescription', 'cpu_count': 'cpuCount', 'memory_size': 'memorySize', 'platform_type': 'platformType', 'login_key_name': 'loginKeyName', 'public_ip_instance_no': 'publicIpInstanceNo', 'public_ip': 'publicIp', 'server_instance_status': 'serverInstanceStatus', 'server_instance_operation': 'serverInstanceOperation', 'server_instance_status_name': 'serverInstanceStatusName', 'create_date': 'createDate', 'uptime': 'uptime', 'server_image_product_code': 'serverImageProductCode', 'server_product_code': 
'serverProductCode', 'is_protect_server_termination': 'isProtectServerTermination', 'zone_code': 'zoneCode', 'region_code': 'regionCode', 'vpc_no': 'vpcNo', 'subnet_no': 'subnetNo', 'network_interface_no_list': 'networkInterfaceNoList', 'init_script_no': 'initScriptNo', 'server_instance_type': 'serverInstanceType', 'base_block_storage_disk_type': 'baseBlockStorageDiskType', 'base_block_storage_disk_detail_type': 'baseBlockStorageDiskDetailType', 'placement_group_no': 'placementGroupNo' } def __init__(self, server_instance_no=None, server_name=None, server_description=None, cpu_count=None, memory_size=None, platform_type=None, login_key_name=None, public_ip_instance_no=None, public_ip=None, server_instance_status=None, server_instance_operation=None, server_instance_status_name=None, create_date=None, uptime=None, server_image_product_code=None, server_product_code=None, is_protect_server_termination=None, zone_code=None, region_code=None, vpc_no=None, subnet_no=None, network_interface_no_list=None, init_script_no=None, server_instance_type=None, base_block_storage_disk_type=None, base_block_storage_disk_detail_type=None, placement_group_no=None): # noqa: E501 """ServerInstance - a model defined in Swagger""" # noqa: E501 self._server_instance_no = None self._server_name = None self._server_description = None self._cpu_count = None self._memory_size = None self._platform_type = None self._login_key_name = None self._public_ip_instance_no = None self._public_ip = None self._server_instance_status = None self._server_instance_operation = None self._server_instance_status_name = None self._create_date = None self._uptime = None self._server_image_product_code = None self._server_product_code = None self._is_protect_server_termination = None self._zone_code = None self._region_code = None self._vpc_no = None self._subnet_no = None self._network_interface_no_list = None self._init_script_no = None self._server_instance_type = None self._base_block_storage_disk_type = 
None self._base_block_storage_disk_detail_type = None self._placement_group_no = None self.discriminator = None if server_instance_no is not None: self.server_instance_no = server_instance_no if server_name is not None: self.server_name = server_name if server_description is not None: self.server_description = server_description if cpu_count is not None: self.cpu_count = cpu_count if memory_size is not None: self.memory_size = memory_size if platform_type is not None: self.platform_type = platform_type if login_key_name is not None: self.login_key_name = login_key_name if public_ip_instance_no is not None: self.public_ip_instance_no = public_ip_instance_no if public_ip is not None: self.public_ip = public_ip if server_instance_status is not None: self.server_instance_status = server_instance_status if server_instance_operation is not None: self.server_instance_operation = server_instance_operation if server_instance_status_name is not None: self.server_instance_status_name = server_instance_status_name if create_date is not None: self.create_date = create_date if uptime is not None: self.uptime = uptime if server_image_product_code is not None: self.server_image_product_code = server_image_product_code if server_product_code is not None: self.server_product_code = server_product_code if is_protect_server_termination is not None: self.is_protect_server_termination = is_protect_server_termination if zone_code is not None: self.zone_code = zone_code if region_code is not None: self.region_code = region_code if vpc_no is not None: self.vpc_no = vpc_no if subnet_no is not None: self.subnet_no = subnet_no if network_interface_no_list is not None: self.network_interface_no_list = network_interface_no_list if init_script_no is not None: self.init_script_no = init_script_no if server_instance_type is not None: self.server_instance_type = server_instance_type if base_block_storage_disk_type is not None: self.base_block_storage_disk_type = base_block_storage_disk_type if 
base_block_storage_disk_detail_type is not None: self.base_block_storage_disk_detail_type = base_block_storage_disk_detail_type if placement_group_no is not None: self.placement_group_no = placement_group_no @property def server_instance_no(self): """Gets the server_instance_no of this ServerInstance. # noqa: E501 서버인스턴스번호 # noqa: E501 :return: The server_instance_no of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_instance_no @server_instance_no.setter def server_instance_no(self, server_instance_no): """Sets the server_instance_no of this ServerInstance. 서버인스턴스번호 # noqa: E501 :param server_instance_no: The server_instance_no of this ServerInstance. # noqa: E501 :type: str """ self._server_instance_no = server_instance_no @property def server_name(self): """Gets the server_name of this ServerInstance. # noqa: E501 서버이름 # noqa: E501 :return: The server_name of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_name @server_name.setter def server_name(self, server_name): """Sets the server_name of this ServerInstance. 서버이름 # noqa: E501 :param server_name: The server_name of this ServerInstance. # noqa: E501 :type: str """ self._server_name = server_name @property def server_description(self): """Gets the server_description of this ServerInstance. # noqa: E501 서버설명 # noqa: E501 :return: The server_description of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_description @server_description.setter def server_description(self, server_description): """Sets the server_description of this ServerInstance. 서버설명 # noqa: E501 :param server_description: The server_description of this ServerInstance. # noqa: E501 :type: str """ self._server_description = server_description @property def cpu_count(self): """Gets the cpu_count of this ServerInstance. # noqa: E501 CPU개수 # noqa: E501 :return: The cpu_count of this ServerInstance. 
# noqa: E501 :rtype: int """ return self._cpu_count @cpu_count.setter def cpu_count(self, cpu_count): """Sets the cpu_count of this ServerInstance. CPU개수 # noqa: E501 :param cpu_count: The cpu_count of this ServerInstance. # noqa: E501 :type: int """ self._cpu_count = cpu_count @property def memory_size(self): """Gets the memory_size of this ServerInstance. # noqa: E501 메모리사이즈 # noqa: E501 :return: The memory_size of this ServerInstance. # noqa: E501 :rtype: int """ return self._memory_size @memory_size.setter def memory_size(self, memory_size): """Sets the memory_size of this ServerInstance. 메모리사이즈 # noqa: E501 :param memory_size: The memory_size of this ServerInstance. # noqa: E501 :type: int """ self._memory_size = memory_size @property def platform_type(self): """Gets the platform_type of this ServerInstance. # noqa: E501 플랫폼유형 # noqa: E501 :return: The platform_type of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._platform_type @platform_type.setter def platform_type(self, platform_type): """Sets the platform_type of this ServerInstance. 플랫폼유형 # noqa: E501 :param platform_type: The platform_type of this ServerInstance. # noqa: E501 :type: CommonCode """ self._platform_type = platform_type @property def login_key_name(self): """Gets the login_key_name of this ServerInstance. # noqa: E501 로그인키이름 # noqa: E501 :return: The login_key_name of this ServerInstance. # noqa: E501 :rtype: str """ return self._login_key_name @login_key_name.setter def login_key_name(self, login_key_name): """Sets the login_key_name of this ServerInstance. 로그인키이름 # noqa: E501 :param login_key_name: The login_key_name of this ServerInstance. # noqa: E501 :type: str """ self._login_key_name = login_key_name @property def public_ip_instance_no(self): """Gets the public_ip_instance_no of this ServerInstance. # noqa: E501 공인IP인스턴스번호 # noqa: E501 :return: The public_ip_instance_no of this ServerInstance. 
# noqa: E501 :rtype: str """ return self._public_ip_instance_no @public_ip_instance_no.setter def public_ip_instance_no(self, public_ip_instance_no): """Sets the public_ip_instance_no of this ServerInstance. 공인IP인스턴스번호 # noqa: E501 :param public_ip_instance_no: The public_ip_instance_no of this ServerInstance. # noqa: E501 :type: str """ self._public_ip_instance_no = public_ip_instance_no @property def public_ip(self): """Gets the public_ip of this ServerInstance. # noqa: E501 공인IP주소 # noqa: E501 :return: The public_ip of this ServerInstance. # noqa: E501 :rtype: str """ return self._public_ip @public_ip.setter def public_ip(self, public_ip): """Sets the public_ip of this ServerInstance. 공인IP주소 # noqa: E501 :param public_ip: The public_ip of this ServerInstance. # noqa: E501 :type: str """ self._public_ip = public_ip @property def server_instance_status(self): """Gets the server_instance_status of this ServerInstance. # noqa: E501 서버인스턴스상태 # noqa: E501 :return: The server_instance_status of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._server_instance_status @server_instance_status.setter def server_instance_status(self, server_instance_status): """Sets the server_instance_status of this ServerInstance. 서버인스턴스상태 # noqa: E501 :param server_instance_status: The server_instance_status of this ServerInstance. # noqa: E501 :type: CommonCode """ self._server_instance_status = server_instance_status @property def server_instance_operation(self): """Gets the server_instance_operation of this ServerInstance. # noqa: E501 서버인스턴스OP # noqa: E501 :return: The server_instance_operation of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._server_instance_operation @server_instance_operation.setter def server_instance_operation(self, server_instance_operation): """Sets the server_instance_operation of this ServerInstance. 서버인스턴스OP # noqa: E501 :param server_instance_operation: The server_instance_operation of this ServerInstance. 
# noqa: E501 :type: CommonCode """ self._server_instance_operation = server_instance_operation @property def server_instance_status_name(self): """Gets the server_instance_status_name of this ServerInstance. # noqa: E501 서버인스턴스상태이름 # noqa: E501 :return: The server_instance_status_name of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_instance_status_name @server_instance_status_name.setter def server_instance_status_name(self, server_instance_status_name): """Sets the server_instance_status_name of this ServerInstance. 서버인스턴스상태이름 # noqa: E501 :param server_instance_status_name: The server_instance_status_name of this ServerInstance. # noqa: E501 :type: str """ self._server_instance_status_name = server_instance_status_name @property def create_date(self): """Gets the create_date of this ServerInstance. # noqa: E501 생성일시 # noqa: E501 :return: The create_date of this ServerInstance. # noqa: E501 :rtype: str """ return self._create_date @create_date.setter def create_date(self, create_date): """Sets the create_date of this ServerInstance. 생성일시 # noqa: E501
:type: str """ self._create_date = create_date @property def uptime(self): """Gets the uptime of this ServerInstance. # noqa: E501 업시간 # noqa: E501 :return: The uptime of this ServerInstance. # noqa: E501 :rtype: str """ return self._uptime @uptime.setter def uptime(self, uptime): """Sets the uptime of this ServerInstance. 업시간 # noqa: E501 :param uptime: The uptime of this ServerInstance. # noqa: E501 :type: str """ self._uptime = uptime @property def server_image_product_code(self): """Gets the server_image_product_code of this ServerInstance. # noqa: E501 서버이미지상품코드 # noqa: E501 :return: The server_image_product_code of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_image_product_code @server_image_product_code.setter def server_image_product_code(self, server_image_product_code): """Sets the server_image_product_code of this ServerInstance. 서버이미지상품코드 # noqa: E501 :param server_image_product_code: The server_image_product_code of this ServerInstance. # noqa: E501 :type: str """ self._server_image_product_code = server_image_product_code @property def server_product_code(self): """Gets the server_product_code of this ServerInstance. # noqa: E501 서버상품코드 # noqa: E501 :return: The server_product_code of this ServerInstance. # noqa: E501 :rtype: str """ return self._server_product_code @server_product_code.setter def server_product_code(self, server_product_code): """Sets the server_product_code of this ServerInstance. 서버상품코드 # noqa: E501 :param server_product_code: The server_product_code of this ServerInstance. # noqa: E501 :type: str """ self._server_product_code = server_product_code @property def is_protect_server_termination(self): """Gets the is_protect_server_termination of this ServerInstance. # noqa: E501 서버반납보호설정여부 # noqa: E501 :return: The is_protect_server_termination of this ServerInstance. 
# noqa: E501 :rtype: bool """ return self._is_protect_server_termination @is_protect_server_termination.setter def is_protect_server_termination(self, is_protect_server_termination): """Sets the is_protect_server_termination of this ServerInstance. 서버반납보호설정여부 # noqa: E501 :param is_protect_server_termination: The is_protect_server_termination of this ServerInstance. # noqa: E501 :type: bool """ self._is_protect_server_termination = is_protect_server_termination @property def zone_code(self): """Gets the zone_code of this ServerInstance. # noqa: E501 ZONE코드 # noqa: E501 :return: The zone_code of this ServerInstance. # noqa: E501 :rtype: str """ return self._zone_code @zone_code.setter def zone_code(self, zone_code): """Sets the zone_code of this ServerInstance. ZONE코드 # noqa: E501 :param zone_code: The zone_code of this ServerInstance. # noqa: E501 :type: str """ self._zone_code = zone_code @property def region_code(self): """Gets the region_code of this ServerInstance. # noqa: E501 REGION코드 # noqa: E501 :return: The region_code of this ServerInstance. # noqa: E501 :rtype: str """ return self._region_code @region_code.setter def region_code(self, region_code): """Sets the region_code of this ServerInstance. REGION코드 # noqa: E501 :param region_code: The region_code of this ServerInstance. # noqa: E501 :type: str """ self._region_code = region_code @property def vpc_no(self): """Gets the vpc_no of this ServerInstance. # noqa: E501 VPC번호 # noqa: E501 :return: The vpc_no of this ServerInstance. # noqa: E501 :rtype: str """ return self._vpc_no @vpc_no.setter def vpc_no(self, vpc_no): """Sets the vpc_no of this ServerInstance. VPC번호 # noqa: E501 :param vpc_no: The vpc_no of this ServerInstance. # noqa: E501 :type: str """ self._vpc_no = vpc_no @property def subnet_no(self): """Gets the subnet_no of this ServerInstance. # noqa: E501 서브넷번호 # noqa: E501 :return: The subnet_no of this ServerInstance. 
# noqa: E501 :rtype: str """ return self._subnet_no @subnet_no.setter def subnet_no(self, subnet_no): """Sets the subnet_no of this ServerInstance. 서브넷번호 # noqa: E501 :param subnet_no: The subnet_no of this ServerInstance. # noqa: E501 :type: str """ self._subnet_no = subnet_no @property def network_interface_no_list(self): """Gets the network_interface_no_list of this ServerInstance. # noqa: E501 네트워크인터페이스번호리스트 # noqa: E501 :return: The network_interface_no_list of this ServerInstance. # noqa: E501 :rtype: list[str] """ return self._network_interface_no_list @network_interface_no_list.setter def network_interface_no_list(self, network_interface_no_list): """Sets the network_interface_no_list of this ServerInstance. 네트워크인터페이스번호리스트 # noqa: E501 :param network_interface_no_list: The network_interface_no_list of this ServerInstance. # noqa: E501 :type: list[str] """ self._network_interface_no_list = network_interface_no_list @property def init_script_no(self): """Gets the init_script_no of this ServerInstance. # noqa: E501 초기화스크립트번호 # noqa: E501 :return: The init_script_no of this ServerInstance. # noqa: E501 :rtype: str """ return self._init_script_no @init_script_no.setter def init_script_no(self, init_script_no): """Sets the init_script_no of this ServerInstance. 초기화스크립트번호 # noqa: E501 :param init_script_no: The init_script_no of this ServerInstance. # noqa: E501 :type: str """ self._init_script_no = init_script_no @property def server_instance_type(self): """Gets the server_instance_type of this ServerInstance. # noqa: E501 서버인스턴스유형 # noqa: E501 :return: The server_instance_type of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._server_instance_type @server_instance_type.setter def server_instance_type(self, server_instance_type): """Sets the server_instance_type of this ServerInstance. 서버인스턴스유형 # noqa: E501 :param server_instance_type: The server_instance_type of this ServerInstance. 
# noqa: E501 :type: CommonCode """ self._server_instance_type = server_instance_type @property def base_block_storage_disk_type(self): """Gets the base_block_storage_disk_type of this ServerInstance. # noqa: E501 기본블록스토리지디스크유형 # noqa: E501 :return: The base_block_storage_disk_type of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._base_block_storage_disk_type @base_block_storage_disk_type.setter def base_block_storage_disk_type(self, base_block_storage_disk_type): """Sets the base_block_storage_disk_type of this ServerInstance. 기본블록스토리지디스크유형 # noqa: E501 :param base_block_storage_disk_type: The base_block_storage_disk_type of this ServerInstance. # noqa: E501 :type: CommonCode """ self._base_block_storage_disk_type = base_block_storage_disk_type @property def base_block_storage_disk_detail_type(self): """Gets the base_block_storage_disk_detail_type of this ServerInstance. # noqa: E501 기본블록스토리지디스크상세유형 # noqa: E501 :return: The base_block_storage_disk_detail_type of this ServerInstance. # noqa: E501 :rtype: CommonCode """ return self._base_block_storage_disk_detail_type @base_block_storage_disk_detail_type.setter def base_block_storage_disk_detail_type(self, base_block_storage_disk_detail_type): """Sets the base_block_storage_disk_detail_type of this ServerInstance. 기본블록스토리지디스크상세유형 # noqa: E501 :param base_block_storage_disk_detail_type: The base_block_storage_disk_detail_type of this ServerInstance. # noqa: E501 :type: CommonCode """ self._base_block_storage_disk_detail_type = base_block_storage_disk_detail_type @property def placement_group_no(self): """Gets the placement_group_no of this ServerInstance. # noqa: E501 물리배치그룹번호 # noqa: E501 :return: The placement_group_no of this ServerInstance. # noqa: E501 :rtype: str """ return self._placement_group_no @placement_group_no.setter def placement_group_no(self, placement_group_no): """Sets the placement_group_no of this ServerInstance. 
물리배치그룹번호 # noqa: E501 :param placement_group_no: The placement_group_no of this ServerInstance. # noqa: E501 :type: str """ self._placement_group_no = placement_group_no def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ServerInstance): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
:param create_date: The create_date of this ServerInstance. # noqa: E501
exec_summary.rs
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. /// Execution summaries to support `EXPLAIN ANALYZE` statements. We don't use /// `ExecutorExecutionSummary` directly since it is less efficient. #[derive(Debug, Default, Copy, Clone, Add, AddAssign, PartialEq, Eq)] pub struct ExecSummary { /// Total time cost in this executor. pub time_processed_ns: usize, /// How many rows this executor produced totally. pub num_produced_rows: usize, /// How many times executor's `next_batch()` is called. pub num_iterations: usize, } /// A trait for all execution summary collectors. pub trait ExecSummaryCollector: Send { type DurationRecorder; /// Creates a new instance with specified output slot index. fn new(output_index: usize) -> Self where Self: Sized; /// Returns an instance that will record elapsed duration and increase /// the iterations counter. The instance should be later passed back to /// `on_finish_iterate` when processing of `next_batch` is completed. fn on_start_iterate(&mut self) -> Self::DurationRecorder; // Increases the process time and produced rows counter. // It should be called when `next_batch` is completed. fn on_finish_iterate(&mut self, dr: Self::DurationRecorder, rows: usize); /// Takes and appends current execution summary into `target`. fn collect_into(&mut self, target: &mut [ExecSummary]); } /// A normal `ExecSummaryCollector` that simply collects execution summaries. /// It acts like `collect = true`. 
pub struct ExecSummaryCollectorEnabled { output_index: usize, counts: ExecSummary, } impl ExecSummaryCollector for ExecSummaryCollectorEnabled { type DurationRecorder = tikv_util::time::Instant; #[inline] fn new(output_index: usize) -> ExecSummaryCollectorEnabled { ExecSummaryCollectorEnabled { output_index, counts: Default::default(), } } #[inline] fn on_start_iterate(&mut self) -> Self::DurationRecorder { self.counts.num_iterations += 1; tikv_util::time::Instant::now_coarse() } #[inline] fn on_finish_iterate(&mut self, dr: Self::DurationRecorder, rows: usize)
#[inline] fn collect_into(&mut self, target: &mut [ExecSummary]) { let current_summary = std::mem::replace(&mut self.counts, ExecSummary::default()); target[self.output_index] += current_summary; } } /// A `ExecSummaryCollector` that does not collect anything. Acts like `collect = false`. pub struct ExecSummaryCollectorDisabled; impl ExecSummaryCollector for ExecSummaryCollectorDisabled { type DurationRecorder = (); #[inline] fn new(_output_index: usize) -> ExecSummaryCollectorDisabled { ExecSummaryCollectorDisabled } #[inline] fn on_start_iterate(&mut self) -> Self::DurationRecorder {} #[inline] fn on_finish_iterate(&mut self, _dr: Self::DurationRecorder, _rows: usize) {} #[inline] fn collect_into(&mut self, _target: &mut [ExecSummary]) {} } /// Combines an `ExecSummaryCollector` with another type. This inner type `T` /// typically `Executor`/`BatchExecutor`, such that `WithSummaryCollector<C, T>` /// would implement the same trait and collects the statistics into `C`. pub struct WithSummaryCollector<C: ExecSummaryCollector, T> { pub(super) summary_collector: C, pub(super) inner: T, }
{ self.counts.num_produced_rows += rows; let elapsed_time = tikv_util::time::duration_to_nanos(dr.elapsed()) as usize; self.counts.time_processed_ns += elapsed_time; }
square.service.ts
import { Injectable } from '@angular/core'; import { ConfigurationService } from '../Configuration/configuration.service'; import { UserService } from '../User/user.service'; import { ObjectData } from '../User/data'; import { SquareData } from './data'; var axios = require('axios'); @Injectable() export class Sq
constructor(private configuration: ConfigurationService, private user: UserService) { } public async getData(): Promise<SquareData> { let userObject = await this.user.getCurrentObject(); let square = await this.getSquare(userObject.segment.i, userObject.segment.j); this.addUserObjectInSquareIfNotExist(userObject, square); return square; } private async getSquare(i: number, j: number): Promise<SquareData> { let configuration = this.configuration.getData(); let response = await axios.get(`${configuration.api}api/v1/map/segments/square5x5/i/${i}/j/${j}`); return response.data; } private addUserObjectInSquareIfNotExist(userObject: ObjectData, square: SquareData) { let isExist = square.objects.some(object => object.id == userObject.id); if (!isExist) { square.objects.push(userObject); } } }
uareService {
index.ts
// Copyright IBM Corp. 2018,2020. All Rights Reserved. // Node module: @loopback/example-todo // This file is licensed under the MIT License. // License text available at https://opensource.org/licenses/MIT import {ApplicationConfig, TodoListApplication} from './application'; // re-exports for our benchmark, not needed for the tutorial itself export * from '@loopback/rest'; export * from './application'; export * from './models'; export * from './repositories'; export async function main(options: ApplicationConfig = {}) { const app = new TodoListApplication(options); await app.boot(); await app.start(); const url = app.restServer.url; console.log(`Server is running at ${url}`); return app; } if (require.main === module) { // Run the application const config = { rest: { port: +(process.env.PORT ?? 3000), host: process.env.HOST, // The `gracePeriodForClose` provides a graceful close for http/https // servers with keep-alive clients. The default value is `Infinity` // (don't force-close). If you want to immediately destroy all sockets // upon stop, set its value to `0`. // See https://www.npmjs.com/package/stoppable gracePeriodForClose: 5000, // 5 seconds openApiSpec: { // useful when used with OpenAPI-to-GraphQL to locate your application setServersFromRequest: true, }, }, }; main(config).catch(err => { console.error('Cannot start the application.', err); process.exit(1);
}); }
utils.py
import torch import numpy as np; from torch.autograd import Variable def normal_std(x): return x.std() * np.sqrt((len(x) - 1.)/(len(x))) class Data_utility(object): # train and valid is the ratio of training set and validation set. test = 1 - train - valid def __init__(self, dSet, train, valid, cuda, horizon, window, normalize = 2): self.cuda = cuda; self.P = window; self.h = horizon self.rawdat = dSet self.dat = np.zeros(self.rawdat.shape); self.n, self.m = self.dat.shape; self.normalize = 2 self.scale = np.ones(self.m); self._normalized(normalize); self._split(int(train * self.n), int((train+valid) * self.n), self.n); self.scale = torch.from_numpy(self.scale).float(); tmp = self.test[1] * self.scale.expand(self.test[1].size(0), self.m); if self.cuda: self.scale = self.scale.cuda(); self.scale = Variable(self.scale); self.rse = normal_std(tmp); self.rae = torch.mean(torch.abs(tmp - torch.mean(tmp))); def _normalized(self, normalize): #normalized by the maximum value of entire matrix. if (normalize == 0): self.dat = self.rawdat if (normalize == 1): self.dat = self.rawdat / np.max(self.rawdat); #normlized by the maximum value of each row(sensor). if (normalize == 2): for i in range(self.m): self.scale[i] = np.max(np.abs(self.rawdat[:,i])); self.dat[:,i] = self.rawdat[:,i] / np.max(np.abs(self.rawdat[:,i])); def _split(self, train, valid, test):
def _batchify(self, idx_set, horizon): n = len(idx_set); X = torch.zeros((n,self.P,self.m)); Y = torch.zeros((n,self.m)); for i in range(n): end = idx_set[i] - self.h + 1; start = end - self.P; X[i,:,:] = torch.from_numpy(self.dat[start:end, :]); Y[i,:] = torch.from_numpy(self.dat[idx_set[i], :]); return [X, Y]; def get_batches(self, inputs, targets, batch_size, shuffle=True): length = len(inputs) if shuffle: index = torch.randperm(length) else: index = torch.LongTensor(range(length)) start_idx = 0 while (start_idx < length): end_idx = min(length, start_idx + batch_size) excerpt = index[start_idx:end_idx] X = inputs[excerpt]; Y = targets[excerpt]; # if (self.cuda): # X = X.cuda(); # Y = Y.cuda(); yield Variable(X), Variable(Y); start_idx += batch_size
train_set = range(self.P+self.h-1, train); valid_set = range(train, valid); test_set = range(valid, self.n); self.train = self._batchify(train_set, self.h); self.valid = self._batchify(valid_set, self.h); self.test = self._batchify(test_set, self.h);
python_message.py
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Contains a metaclass and helper functions used to create protocol message classes from Descriptor objects at runtime. Recall that a metaclass is the "type" of a class. (A class is to a metaclass what an instance is to a class.) In this case, we use the GeneratedProtocolMessageType metaclass to inject all the useful functionality into the classes output by the protocol compiler at compile-time. The upshot of all this is that the real implementation details for ALL pure-Python protocol buffers are *here in this file*. 
""" from __future__ import absolute_import from future import standard_library standard_library.install_aliases() from builtins import str from builtins import range from builtins import object import sys import six from six.moves import range if sys.version_info[0] < 3: try: from io import StringIO as BytesIO except ImportError: from io import StringIO as BytesIO import six.moves.copyreg as copyreg else: from io import BytesIO import copyreg import struct import weakref from google.net.proto2.python.internal import containers from google.net.proto2.python.internal import decoder from google.net.proto2.python.internal import encoder from google.net.proto2.python.internal import enum_type_wrapper from google.net.proto2.python.internal import message_listener as message_listener_mod from google.net.proto2.python.internal import type_checkers from google.net.proto2.python.internal import wire_format from google.net.proto2.python.public import descriptor as descriptor_mod from google.net.proto2.python.public import message as message_mod from google.net.proto2.python.public import text_format _FieldDescriptor = descriptor_mod.FieldDescriptor def NewMessage(bases, descriptor, dictionary): _AddClassAttributesForNestedExtensions(descriptor, dictionary) _AddSlots(descriptor, dictionary) return bases def InitMessage(descriptor, cls): cls._decoders_by_tag = {} cls._extensions_by_name = {} cls._extensions_by_number = {} if (descriptor.has_options and descriptor.GetOptions().message_set_wire_format): cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( decoder.MessageSetItemDecoder(cls._extensions_by_number), None) for field in descriptor.fields: _AttachFieldHelpers(cls, field) _AddEnumValues(descriptor, cls) _AddInitMethod(descriptor, cls) _AddPropertiesForFields(descriptor, cls) _AddPropertiesForExtensions(descriptor, cls) _AddStaticMethods(cls) _AddMessageMethods(descriptor, cls) _AddPrivateHelperMethods(descriptor, cls) copyreg.pickle(cls, lambda obj: (cls, (), 
obj.__getstate__())) def _PropertyName(proto_field_name): """Returns the name of the public property attribute which clients can use to get and (in some cases) set the value of a protocol message field. Args: proto_field_name: The protocol message field name, exactly as it appears (or would appear) in a .proto file. """ return proto_field_name def _VerifyExtensionHandle(message, extension_handle): """Verify that the given extension handle is valid.""" if not isinstance(extension_handle, _FieldDescriptor): raise KeyError('HasExtension() expects an extension handle, got: %s' % extension_handle) if not extension_handle.is_extension: raise KeyError('"%s" is not an extension.' % extension_handle.full_name) if not extension_handle.containing_type: raise KeyError('"%s" is missing a containing_type.' % extension_handle.full_name) if extension_handle.containing_type is not message.DESCRIPTOR: raise KeyError('Extension "%s" extends message type "%s", but this ' 'message is of type "%s".' % (extension_handle.full_name, extension_handle.containing_type.full_name, message.DESCRIPTOR.full_name)) def _AddSlots(message_descriptor, dictionary): """Adds a __slots__ entry to dictionary, containing the names of all valid attributes for this message type. Args: message_descriptor: A Descriptor instance describing this message type. dictionary: Class dictionary to which we'll add a '__slots__' entry. 
""" dictionary['__slots__'] = ['_cached_byte_size', '_cached_byte_size_dirty', '_fields', '_unknown_fields', '_is_present_in_parent', '_listener', '_listener_for_children', '__weakref__', '_oneofs'] def _IsMessageSetExtension(field): return (field.is_extension and field.containing_type.has_options and field.containing_type.GetOptions().message_set_wire_format and field.type == _FieldDescriptor.TYPE_MESSAGE and field.message_type == field.extension_scope and field.label == _FieldDescriptor.LABEL_OPTIONAL) def _AttachFieldHelpers(cls, field_descriptor): is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) is_packed = (field_descriptor.has_options and field_descriptor.GetOptions().packed) if _IsMessageSetExtension(field_descriptor): field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) sizer = encoder.MessageSetItemSizer(field_descriptor.number) else: field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( field_descriptor.number, is_repeated, is_packed) sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( field_descriptor.number, is_repeated, is_packed) field_descriptor._encoder = field_encoder field_descriptor._sizer = sizer field_descriptor._default_constructor = _DefaultValueConstructorForField( field_descriptor) def AddDecoder(wiretype, is_packed): tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) cls._decoders_by_tag[tag_bytes] = ( type_checkers.TYPE_TO_DECODER[field_descriptor.type]( field_descriptor.number, is_repeated, is_packed, field_descriptor, field_descriptor._default_constructor), field_descriptor if field_descriptor.containing_oneof is not None else None) AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False) if is_repeated and wire_format.IsTypePackable(field_descriptor.type): AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) def _AddClassAttributesForNestedExtensions(descriptor, dictionary): extension_dict = descriptor.extensions_by_name for 
extension_name, extension_field in six.iteritems(extension_dict): assert extension_name not in dictionary dictionary[extension_name] = extension_field def _AddEnumValues(descriptor, cls): """Sets class-level attributes for all enum fields defined in this message. Also exporting a class-level object that can name enum values. Args: descriptor: Descriptor object for this message type. cls: Class we're constructing for this message type. """ for enum_type in descriptor.enum_types: setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) for enum_value in enum_type.values: setattr(cls, enum_value.name, enum_value.number) def _DefaultValueConstructorForField(field): """Returns a function which returns a default value for a field. Args: field: FieldDescriptor object for this field. The returned function has one argument: message: Message instance containing this field, or a weakref proxy of same. That function in turn returns a default value for this field. The default value may refer back to |message| via a weak reference. 
""" if field.label == _FieldDescriptor.LABEL_REPEATED: if field.has_default_value and field.default_value != []: raise ValueError('Repeated field default value not empty list: %s' % ( field.default_value)) if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: message_type = field.message_type def MakeRepeatedMessageDefault(message): return containers.RepeatedCompositeFieldContainer( message._listener_for_children, field.message_type) return MakeRepeatedMessageDefault else: type_checker = type_checkers.GetTypeChecker(field) def MakeRepeatedScalarDefault(message): return containers.RepeatedScalarFieldContainer( message._listener_for_children, type_checker) return MakeRepeatedScalarDefault if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: message_type = field.message_type def MakeSubMessageDefault(message): result = message_type._concrete_class() result._SetListener(message._listener_for_children) return result return MakeSubMessageDefault def MakeScalarDefault(message): return field.default_value return MakeScalarDefault def _ReraiseTypeErrorWithFieldName(message_name, field_name): """Re-raise the currently-handled TypeError with the field name added.""" exc = sys.exc_info()[1] if len(exc.args) == 1 and type(exc) is TypeError: exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) six.reraise(type(exc), exc, sys.exc_info()[2]) def _AddInitMethod(message_descriptor, cls): """Adds an __init__ method to cls.""" fields = message_descriptor.fields def init(self, **kwargs): self._cached_byte_size = 0 self._cached_byte_size_dirty = len(kwargs) > 0 self._fields = {} self._oneofs = {} self._unknown_fields = () self._is_present_in_parent = False self._listener = message_listener_mod.NullMessageListener() self._listener_for_children = _Listener(self) for field_name, field_value in six.iteritems(kwargs): field = _GetFieldByName(message_descriptor, field_name) if field is None: raise TypeError("%s() got an unexpected keyword argument '%s'" % 
(message_descriptor.name, field_name)) if field.label == _FieldDescriptor.LABEL_REPEATED: copy = field._default_constructor(self) if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: for val in field_value: copy.add().MergeFrom(val) else: copy.extend(field_value) self._fields[field] = copy elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: copy = field._default_constructor(self) try: copy.MergeFrom(field_value) except TypeError: _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) self._fields[field] = copy else: try: setattr(self, field_name, field_value) except TypeError: _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) init.__module__ = None init.__doc__ = None cls.__init__ = init def _GetFieldByName(message_descriptor, field_name): """Returns a field descriptor by field name. Args: message_descriptor: A Descriptor describing all fields in message. field_name: The name of the field to retrieve. Returns: The field descriptor associated with the field name. """ try: return message_descriptor.fields_by_name[field_name] except KeyError: raise ValueError('Protocol message has no "%s" field.' % field_name) def _AddPropertiesForFields(descriptor, cls): """Adds properties for all fields in this protocol message type.""" for field in descriptor.fields: _AddPropertiesForField(field, cls) if descriptor.is_extendable: cls.Extensions = property(lambda self: _ExtensionDict(self)) def _AddPropertiesForField(field, cls): """Adds a public property for a protocol message field. Clients can use this property to get and (in the case of non-repeated scalar fields) directly set the value of a protocol message field. Args: field: A FieldDescriptor for this field. cls: The class we're constructing. 
""" assert _FieldDescriptor.MAX_CPPTYPE == 10 constant_name = field.name.upper() + "_FIELD_NUMBER" setattr(cls, constant_name, field.number) if field.label == _FieldDescriptor.LABEL_REPEATED: _AddPropertiesForRepeatedField(field, cls) elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: _AddPropertiesForNonRepeatedCompositeField(field, cls) else: _AddPropertiesForNonRepeatedScalarField(field, cls) def _AddPropertiesForRepeatedField(field, cls): """Adds a public property for a "repeated" protocol message field. Clients can use this property to get the value of the field, which will be either a _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see below). Note that when clients add values to these containers, we perform type-checking in the case of repeated scalar fields, and we also set any necessary "has" bits as a side-effect. Args: field: A FieldDescriptor for this field. cls: The class we're constructing. """ proto_field_name = field.name property_name = _PropertyName(proto_field_name) def getter(self): field_value = self._fields.get(field) if field_value is None: field_value = field._default_constructor(self) field_value = self._fields.setdefault(field, field_value) return field_value getter.__module__ = None getter.__doc__ = 'Getter for %s.' % proto_field_name def setter(self, new_value): raise AttributeError('Assignment not allowed to repeated field ' '"%s" in protocol message object.' % proto_field_name) doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name setattr(cls, property_name, property(getter, setter, doc=doc)) def _AddPropertiesForNonRepeatedScalarField(field, cls): """Adds a public property for a nonrepeated, scalar protocol message field. Clients can use this property to get and directly set the value of the field. Note that when the client sets the value of a field by using this property, all necessary "has" bits are set as a side-effect, and we also perform type-checking. 
Args: field: A FieldDescriptor for this field. cls: The class we're constructing. """ proto_field_name = field.name property_name = _PropertyName(proto_field_name) type_checker = type_checkers.GetTypeChecker(field) default_value = field.default_value valid_values = set() def getter(self): return self._fields.get(field, default_value) getter.__module__ = None getter.__doc__ = 'Getter for %s.' % proto_field_name def field_setter(self, new_value): self._fields[field] = type_checker.CheckValue(new_value) if not self._cached_byte_size_dirty: self._Modified() if field.containing_oneof is not None: def setter(self, new_value): field_setter(self, new_value) self._UpdateOneofState(field) else: setter = field_setter setter.__module__ = None setter.__doc__ = 'Setter for %s.' % proto_field_name doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name setattr(cls, property_name, property(getter, setter, doc=doc)) def _AddPropertiesForNonRepeatedCompositeField(field, cls): """Adds a public property for a nonrepeated, composite protocol message field. A composite field is a "group" or "message" field. Clients can use this property to get the value of the field, but cannot assign to the property directly. Args: field: A FieldDescriptor for this field. cls: The class we're constructing. """ proto_field_name = field.name property_name = _PropertyName(proto_field_name) message_type = field.message_type def getter(self): field_value = self._fields.get(field) if field_value is None: field_value = message_type._concrete_class() field_value._SetListener( _OneofListener(self, field) if field.containing_oneof is not None else self._listener_for_children) field_value = self._fields.setdefault(field, field_value) return field_value getter.__module__ = None getter.__doc__ = 'Getter for %s.' % proto_field_name def setter(self, new_value): raise AttributeError('Assignment not allowed to composite field ' '"%s" in protocol message object.' 
% proto_field_name) doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name setattr(cls, property_name, property(getter, setter, doc=doc)) def _AddPropertiesForExtensions(descriptor, cls): """Adds properties for all fields in this protocol message type.""" extension_dict = descriptor.extensions_by_name for extension_name, extension_field in six.iteritems(extension_dict): constant_name = extension_name.upper() + "_FIELD_NUMBER" setattr(cls, constant_name, extension_field.number) def _AddStaticMethods(cls): def RegisterExtension(extension_handle): extension_handle.containing_type = cls.DESCRIPTOR _AttachFieldHelpers(cls, extension_handle) actual_handle = cls._extensions_by_number.setdefault( extension_handle.number, extension_handle) if actual_handle is not extension_handle: raise AssertionError( 'Extensions "%s" and "%s" both try to extend message type "%s" with ' 'field number %d.' % (extension_handle.full_name, actual_handle.full_name, cls.DESCRIPTOR.full_name, extension_handle.number)) cls._extensions_by_name[extension_handle.full_name] = extension_handle handle = extension_handle if _IsMessageSetExtension(handle): cls._extensions_by_name[ extension_handle.message_type.full_name] = extension_handle cls.RegisterExtension = staticmethod(RegisterExtension) def FromString(s): message = cls() message.MergeFromString(s) return message cls.FromString = staticmethod(FromString) def _IsPresent(item): """Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().""" if item[0].label == _FieldDescriptor.LABEL_REPEATED: return bool(item[1]) elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: return item[1]._is_present_in_parent else: return True def _AddListFieldsMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def ListFields(self): all_fields = [item for item in six.iteritems(self._fields) if _IsPresent(item)] all_fields.sort(key = lambda item: 
item[0].number) return all_fields cls.ListFields = ListFields def _AddHasFieldMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" singular_fields = {} for field in message_descriptor.fields: if field.label != _FieldDescriptor.LABEL_REPEATED: singular_fields[field.name] = field for field in message_descriptor.oneofs: singular_fields[field.name] = field def HasField(self, field_name): try: field = singular_fields[field_name] except KeyError: raise ValueError( 'Protocol message has no singular "%s" field.' % field_name) if isinstance(field, descriptor_mod.OneofDescriptor): try: return HasField(self, self._oneofs[field].name) except KeyError: return False else: if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: value = self._fields.get(field) return value is not None and value._is_present_in_parent else: return field in self._fields cls.HasField = HasField def _AddClearFieldMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def ClearField(self, field_name): try: field = message_descriptor.fields_by_name[field_name] except KeyError: try: field = message_descriptor.oneofs_by_name[field_name] if field in self._oneofs: field = self._oneofs[field] else: return except KeyError: raise ValueError('Protocol message has no "%s" field.' % field_name) if field in self._fields: del self._fields[field] if self._oneofs.get(field.containing_oneof, None) is field: del self._oneofs[field.containing_oneof] self._Modified() cls.ClearField = ClearField def _AddClearExtensionMethod(cls): """Helper for _AddMessageMethods().""" def ClearExtension(self, extension_handle): _VerifyExtensionHandle(self, extension_handle) if extension_handle in self._fields: del self._fields[extension_handle] self._Modified() cls.ClearExtension = ClearExtension def _AddClearMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def
(self): self._fields = {} self._unknown_fields = () self._oneofs = {} self._Modified() cls.Clear = Clear def _AddHasExtensionMethod(cls): """Helper for _AddMessageMethods().""" def HasExtension(self, extension_handle): _VerifyExtensionHandle(self, extension_handle) if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: raise KeyError('"%s" is repeated.' % extension_handle.full_name) if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: value = self._fields.get(extension_handle) return value is not None and value._is_present_in_parent else: return extension_handle in self._fields cls.HasExtension = HasExtension def _AddEqualsMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def __eq__(self, other): if (not isinstance(other, message_mod.Message) or other.DESCRIPTOR != self.DESCRIPTOR): return False if self is other: return True if not self.ListFields() == other.ListFields(): return False unknown_fields = list(self._unknown_fields) unknown_fields.sort() other_unknown_fields = list(other._unknown_fields) other_unknown_fields.sort() return unknown_fields == other_unknown_fields cls.__eq__ = __eq__ def _AddStrMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def __str__(self): return text_format.MessageToString(self) cls.__str__ = __str__ def _AddUnicodeMethod(unused_message_descriptor, cls): """Helper for _AddMessageMethods().""" def __unicode__(self): return text_format.MessageToString(self, as_utf8=True).decode('utf-8') cls.__unicode__ = __unicode__ def _AddSetListenerMethod(cls): """Helper for _AddMessageMethods().""" def SetListener(self, listener): if listener is None: self._listener = message_listener_mod.NullMessageListener() else: self._listener = listener cls._SetListener = SetListener def _BytesForNonRepeatedElement(value, field_number, field_type): """Returns the number of bytes needed to serialize a non-repeated element. 
The returned byte count includes space for tag information and any other additional space associated with serializing value. Args: value: Value we're serializing. field_number: Field number of this value. (Since the field number is stored as part of a varint-encoded tag, this has an impact on the total bytes required to serialize the value). field_type: The type of the field. One of the TYPE_* constants within FieldDescriptor. """ try: fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] return fn(field_number, value) except KeyError: raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) def _AddByteSizeMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def ByteSize(self): if not self._cached_byte_size_dirty: return self._cached_byte_size size = 0 for field_descriptor, field_value in self.ListFields(): size += field_descriptor._sizer(field_value) for tag_bytes, value_bytes in self._unknown_fields: size += len(tag_bytes) + len(value_bytes) self._cached_byte_size = size self._cached_byte_size_dirty = False self._listener_for_children.dirty = False return size cls.ByteSize = ByteSize def _AddSerializeToStringMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def SerializeToString(self): errors = [] if not self.IsInitialized(): raise message_mod.EncodeError( 'Message %s is missing required fields: %s' % ( self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) return self.SerializePartialToString() cls.SerializeToString = SerializeToString def _AddSerializePartialToStringMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def SerializePartialToString(self): out = BytesIO() self._InternalSerialize(out.write) return out.getvalue() cls.SerializePartialToString = SerializePartialToString def InternalSerialize(self, write_bytes): for field_descriptor, field_value in self.ListFields(): field_descriptor._encoder(write_bytes, field_value) for tag_bytes, value_bytes in 
self._unknown_fields: write_bytes(tag_bytes) write_bytes(value_bytes) cls._InternalSerialize = InternalSerialize def _AddMergeFromStringMethod(message_descriptor, cls): """Helper for _AddMessageMethods().""" def MergeFromString(self, serialized): length = len(serialized) try: if self._InternalParse(serialized, 0, length) != length: raise message_mod.DecodeError('Unexpected end-group tag.') except (IndexError, TypeError): raise message_mod.DecodeError('Truncated message.') except struct.error as e: raise message_mod.DecodeError(e) return length cls.MergeFromString = MergeFromString local_ReadTag = decoder.ReadTag local_SkipField = decoder.SkipField decoders_by_tag = cls._decoders_by_tag def InternalParse(self, buffer, pos, end): self._Modified() field_dict = self._fields unknown_field_list = self._unknown_fields while pos != end: (tag_bytes, new_pos) = local_ReadTag(buffer, pos) field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) if field_decoder is None: value_start_pos = new_pos new_pos = local_SkipField(buffer, new_pos, end, tag_bytes) if new_pos == -1: return pos if not unknown_field_list: unknown_field_list = self._unknown_fields = [] unknown_field_list.append((tag_bytes, buffer[value_start_pos:new_pos])) pos = new_pos else: pos = field_decoder(buffer, new_pos, end, self, field_dict) if field_desc: self._UpdateOneofState(field_desc) return pos cls._InternalParse = InternalParse def _AddIsInitializedMethod(message_descriptor, cls): """Adds the IsInitialized and FindInitializationError methods to the protocol message class.""" required_fields = [field for field in message_descriptor.fields if field.label == _FieldDescriptor.LABEL_REQUIRED] def IsInitialized(self, errors=None): """Checks if all required fields of a message are set. Args: errors: A list which, if provided, will be populated with the field paths of all missing required fields. Returns: True iff the specified message has all required fields set. 
""" for field in required_fields: if (field not in self._fields or (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and not self._fields[field]._is_present_in_parent)): if errors is not None: errors.extend(self.FindInitializationErrors()) return False for field, value in list(self._fields.items()): if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: if field.label == _FieldDescriptor.LABEL_REPEATED: for element in value: if not element.IsInitialized(): if errors is not None: errors.extend(self.FindInitializationErrors()) return False elif value._is_present_in_parent and not value.IsInitialized(): if errors is not None: errors.extend(self.FindInitializationErrors()) return False return True cls.IsInitialized = IsInitialized def FindInitializationErrors(self): """Finds required fields which are not initialized. Returns: A list of strings. Each string is a path to an uninitialized field from the top-level message, e.g. "foo.bar[5].baz". """ errors = [] for field in required_fields: if not self.HasField(field.name): errors.append(field.name) for field, value in self.ListFields(): if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: if field.is_extension: name = "(%s)" % field.full_name else: name = field.name if field.label == _FieldDescriptor.LABEL_REPEATED: for i in range(len(value)): element = value[i] prefix = "%s[%d]." % (name, i) sub_errors = element.FindInitializationErrors() errors += [ prefix + error for error in sub_errors ] else: prefix = name + "." sub_errors = value.FindInitializationErrors() errors += [ prefix + error for error in sub_errors ] return errors cls.FindInitializationErrors = FindInitializationErrors def _AddMergeFromMethod(cls): LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE def MergeFrom(self, msg): if not isinstance(msg, cls): raise TypeError( "Parameter to MergeFrom() must be instance of same class: " "expected %s got %s." 
% (cls.__name__, type(msg).__name__)) assert msg is not self self._Modified() fields = self._fields for field, value in six.iteritems(msg._fields): if field.label == LABEL_REPEATED: field_value = fields.get(field) if field_value is None: field_value = field._default_constructor(self) fields[field] = field_value field_value.MergeFrom(value) elif field.cpp_type == CPPTYPE_MESSAGE: if value._is_present_in_parent: field_value = fields.get(field) if field_value is None: field_value = field._default_constructor(self) fields[field] = field_value field_value.MergeFrom(value) else: self._fields[field] = value if msg._unknown_fields: if not self._unknown_fields: self._unknown_fields = [] self._unknown_fields.extend(msg._unknown_fields) cls.MergeFrom = MergeFrom def _AddWhichOneofMethod(message_descriptor, cls): def WhichOneof(self, oneof_name): """Returns the name of the currently set field inside a oneof, or None.""" try: field = message_descriptor.oneofs_by_name[oneof_name] except KeyError: raise ValueError( 'Protocol message has no oneof "%s" field.' 
% oneof_name) nested_field = self._oneofs.get(field, None) if nested_field is not None and self.HasField(nested_field.name): return nested_field.name else: return None cls.WhichOneof = WhichOneof def _AddMessageMethods(message_descriptor, cls): """Adds implementations of all Message methods to cls.""" _AddListFieldsMethod(message_descriptor, cls) _AddHasFieldMethod(message_descriptor, cls) _AddClearFieldMethod(message_descriptor, cls) if message_descriptor.is_extendable: _AddClearExtensionMethod(cls) _AddHasExtensionMethod(cls) _AddClearMethod(message_descriptor, cls) _AddEqualsMethod(message_descriptor, cls) _AddStrMethod(message_descriptor, cls) _AddUnicodeMethod(message_descriptor, cls) _AddSetListenerMethod(cls) _AddByteSizeMethod(message_descriptor, cls) _AddSerializeToStringMethod(message_descriptor, cls) _AddSerializePartialToStringMethod(message_descriptor, cls) _AddMergeFromStringMethod(message_descriptor, cls) _AddIsInitializedMethod(message_descriptor, cls) _AddMergeFromMethod(cls) _AddWhichOneofMethod(message_descriptor, cls) def _AddPrivateHelperMethods(message_descriptor, cls): """Adds implementation of private helper methods to cls.""" def Modified(self): """Sets the _cached_byte_size_dirty bit to true, and propagates this to our listener iff this was a state change. """ if not self._cached_byte_size_dirty: self._cached_byte_size_dirty = True self._listener_for_children.dirty = True self._is_present_in_parent = True self._listener.Modified() def _UpdateOneofState(self, field): """Sets field as the active field in its containing oneof. Will also delete currently active field in the oneof, if it is different from the argument. Does not mark the message as modified. 
""" other_field = self._oneofs.setdefault(field.containing_oneof, field) if other_field is not field: del self._fields[other_field] self._oneofs[field.containing_oneof] = field cls._Modified = Modified cls.SetInParent = Modified cls._UpdateOneofState = _UpdateOneofState class _Listener(object): """MessageListener implementation that a parent message registers with its child message. In order to support semantics like: foo.bar.baz.qux = 23 assert foo.HasField('bar') ...child objects must have back references to their parents. This helper class is at the heart of this support. """ def __init__(self, parent_message): """Args: parent_message: The message whose _Modified() method we should call when we receive Modified() messages. """ if isinstance(parent_message, weakref.ProxyType): self._parent_message_weakref = parent_message else: self._parent_message_weakref = weakref.proxy(parent_message) self.dirty = False def Modified(self): if self.dirty: return try: self._parent_message_weakref._Modified() except ReferenceError: pass class _OneofListener(_Listener): """Special listener implementation for setting composite oneof fields.""" def __init__(self, parent_message, field): """Args: parent_message: The message whose _Modified() method we should call when we receive Modified() messages. field: The descriptor of the field being set in the parent message. """ super(_OneofListener, self).__init__(parent_message) self._field = field def Modified(self): """Also updates the state of the containing oneof in the parent message.""" try: self._parent_message_weakref._UpdateOneofState(self._field) super(_OneofListener, self).Modified() except ReferenceError: pass class _ExtensionDict(object): """Dict-like container for supporting an indexable "Extensions" field on proto instances. Note that in all cases we expect extension handles to be FieldDescriptors. """ def __init__(self, extended_message): """extended_message: Message instance for which we are the Extensions dict. 
""" self._extended_message = extended_message def __getitem__(self, extension_handle): """Returns the current value of the given extension handle.""" _VerifyExtensionHandle(self._extended_message, extension_handle) result = self._extended_message._fields.get(extension_handle) if result is not None: return result if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: result = extension_handle._default_constructor(self._extended_message) elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: result = extension_handle.message_type._concrete_class() try: result._SetListener(self._extended_message._listener_for_children) except ReferenceError: pass else: return extension_handle.default_value result = self._extended_message._fields.setdefault( extension_handle, result) return result def __eq__(self, other): if not isinstance(other, self.__class__): return False my_fields = self._extended_message.ListFields() other_fields = other._extended_message.ListFields() my_fields = [ field for field in my_fields if field.is_extension ] other_fields = [ field for field in other_fields if field.is_extension ] return my_fields == other_fields def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def __setitem__(self, extension_handle, value): """If extension_handle specifies a non-repeated, scalar extension field, sets the value of that field. """ _VerifyExtensionHandle(self._extended_message, extension_handle) if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE): raise TypeError( 'Cannot assign to extension "%s" because it is a repeated or ' 'composite type.' 
% extension_handle.full_name) type_checker = type_checkers.GetTypeChecker( extension_handle) self._extended_message._fields[extension_handle] = ( type_checker.CheckValue(value)) self._extended_message._Modified() def _FindExtensionByName(self, name): """Tries to find a known extension with the specified name. Args: name: Extension full name. Returns: Extension field descriptor. """ return self._extended_message._extensions_by_name.get(name, None)
Clear
model.py
import keras from keras.layers import Activation from keras.layers import Conv2D, BatchNormalization, Dense, Flatten, Reshape def
(): model = keras.models.Sequential() model.add(Conv2D(64, kernel_size=(3,3), activation='relu', padding='same', input_shape=(9,9,1))) model.add(BatchNormalization()) model.add(Conv2D(64, kernel_size=(3,3), activation='relu', padding='same')) model.add(BatchNormalization()) model.add(Conv2D(128, kernel_size=(1,1), activation='relu', padding='same')) model.add(Flatten()) model.add(Dense(81*9)) model.add(Reshape((-1, 9))) model.add(Activation('softmax')) return model
get_model
Chapter.ts
import { ScanSource } from './ScanSource'; import { Page } from './Page'; export interface Chapter { id: string; name: string; number: number; link: string; scanned: boolean;
source: ScanSource; createDate: Date; pages: Page[]; }
global.go
package log import ( "context" "fmt" "os" "sync" ) // globalLogger is designed as a global logger in current process. var global = &loggerAppliance{} // loggerAppliance is the proxy of `Logger` to // make logger change will affect all sub-logger. type loggerAppliance struct { lock sync.Mutex Logger } func init() { global.SetLogger(DefaultLogger) } func (a *loggerAppliance) SetLogger(in Logger) { a.lock.Lock() defer a.lock.Unlock() a.Logger = in } func (a *loggerAppliance) GetLogger() Logger { return a.Logger } // SetLogger should be called before any other log call. // And it is NOT THREAD SAFE. func SetLogger(logger Logger) { global.SetLogger(logger) } // GetLogger returns global logger appliance as logger in current process. func GetLogger() Logger { return global.GetLogger() } // Log Print log by level and keyvals. func Log(level Level, keyvals ...interface{}) { _ = global.Log(level, keyvals...) } // Context with context logger. func Context(ctx context.Context) *Helper { return NewHelper(WithContext(ctx, global.Logger)) } // Debug logs a message at debug level. func Debug(a ...interface{}) { _ = global.Log(LevelDebug, DefaultMessageKey, fmt.Sprint(a...)) } // Debugf logs a message at debug level. func Debugf(format string, a ...interface{}) { _ = global.Log(LevelDebug, DefaultMessageKey, fmt.Sprintf(format, a...)) } // Debugw logs a message at debug level. func Debugw(keyvals ...interface{}) { _ = global.Log(LevelDebug, keyvals...) } // Info logs a message at info level. func Info(a ...interface{}) { _ = global.Log(LevelInfo, DefaultMessageKey, fmt.Sprint(a...)) } // Infof logs a message at info level. func Infof(format string, a ...interface{}) { _ = global.Log(LevelInfo, DefaultMessageKey, fmt.Sprintf(format, a...)) } // Infow logs a message at info level. func Infow(keyvals ...interface{}) { _ = global.Log(LevelInfo, keyvals...) } // Warn logs a message at warn level. 
func Warn(a ...interface{}) { _ = global.Log(LevelWarn, DefaultMessageKey, fmt.Sprint(a...)) }
// Warnw logs a message at warnf level. func Warnw(keyvals ...interface{}) { _ = global.Log(LevelWarn, keyvals...) } // Error logs a message at error level. func Error(a ...interface{}) { _ = global.Log(LevelError, DefaultMessageKey, fmt.Sprint(a...)) } // Errorf logs a message at error level. func Errorf(format string, a ...interface{}) { _ = global.Log(LevelError, DefaultMessageKey, fmt.Sprintf(format, a...)) } // Errorw logs a message at error level. func Errorw(keyvals ...interface{}) { _ = global.Log(LevelError, keyvals...) } // Fatal logs a message at fatal level. func Fatal(a ...interface{}) { _ = global.Log(LevelFatal, DefaultMessageKey, fmt.Sprint(a...)) os.Exit(1) } // Fatalf logs a message at fatal level. func Fatalf(format string, a ...interface{}) { _ = global.Log(LevelFatal, DefaultMessageKey, fmt.Sprintf(format, a...)) os.Exit(1) } // Fatalw logs a message at fatal level. func Fatalw(keyvals ...interface{}) { _ = global.Log(LevelFatal, keyvals...) os.Exit(1) }
// Warnf logs a message at warnf level. func Warnf(format string, a ...interface{}) { _ = global.Log(LevelWarn, DefaultMessageKey, fmt.Sprintf(format, a...)) }
im_processing.py
from __future__ import absolute_import, division, print_function import skimage.transform import numpy as np def
(bboxes, height, width): bboxes = np.maximum(bboxes, 0) bboxes[:, 2:4] = np.maximum(bboxes[:, 0:2], bboxes[:, 2:4]) bboxes[:, 0] = np.minimum(bboxes[:, 0], width-1) bboxes[:, 1] = np.minimum(bboxes[:, 1], height-1) bboxes[:, 2] = np.minimum(bboxes[:, 2], width-1) bboxes[:, 3] = np.minimum(bboxes[:, 3], height-1) return bboxes def resize_and_pad(im, input_h, input_w): # Resize and pad im to input_h x input_w size im_h, im_w = im.shape[:2] scale = min(input_h / im_h, input_w / im_w) resized_h = int(np.round(im_h * scale)) resized_w = int(np.round(im_w * scale)) pad_h = int(np.floor(input_h - resized_h) / 2) pad_w = int(np.floor(input_w - resized_w) / 2) resized_im = skimage.transform.resize(im, [resized_h, resized_w]) if im.ndim > 2: new_im = np.zeros((input_h, input_w, im.shape[2]), dtype=resized_im.dtype) else: new_im = np.zeros((input_h, input_w), dtype=resized_im.dtype) new_im[pad_h:pad_h+resized_h, pad_w:pad_w+resized_w, ...] = resized_im return new_im def resize_and_crop(im, input_h, input_w): # Resize and crop im to input_h x input_w size im_h, im_w = im.shape[:2] scale = max(input_h / im_h, input_w / im_w) resized_h = int(np.round(im_h * scale)) resized_w = int(np.round(im_w * scale)) crop_h = int(np.floor(resized_h - input_h) / 2) crop_w = int(np.floor(resized_w - input_w) / 2) resized_im = skimage.transform.resize(im, [resized_h, resized_w]) if im.ndim > 2: new_im = np.zeros((input_h, input_w, im.shape[2]), dtype=resized_im.dtype) else: new_im = np.zeros((input_h, input_w), dtype=resized_im.dtype) new_im[...] = resized_im[crop_h:crop_h+input_h, crop_w:crop_w+input_w, ...] 
return new_im def crop_bboxes_subtract_mean(im, bboxes, crop_size, image_mean): if isinstance(bboxes, list): bboxes = np.array(bboxes) bboxes = bboxes.reshape((-1, 4)) im = skimage.img_as_ubyte(im) num_bbox = bboxes.shape[0] imcrop_batch = np.zeros((num_bbox, crop_size, crop_size, 3), dtype=np.float32) for n_bbox in range(bboxes.shape[0]): xmin, ymin, xmax, ymax = bboxes[n_bbox] # crop and resize imcrop = im[ymin:ymax+1, xmin:xmax+1, :] imcrop_batch[n_bbox, ...] = skimage.img_as_ubyte( skimage.transform.resize(imcrop, [crop_size, crop_size])) imcrop_batch -= image_mean return imcrop_batch def bboxes_from_masks(masks): if masks.ndim == 2: masks = masks[np.newaxis, ...] num_mask = masks.shape[0] bboxes = np.zeros((num_mask, 4), dtype=np.int32) for n_mask in range(num_mask): idx = np.nonzero(masks[n_mask]) xmin, xmax = np.min(idx[1]), np.max(idx[1]) ymin, ymax = np.min(idx[0]), np.max(idx[0]) bboxes[n_mask, :] = [xmin, ymin, xmax, ymax] return bboxes def crop_masks_subtract_mean(im, masks, crop_size, image_mean): if masks.ndim == 2: masks = masks[np.newaxis, ...] num_mask = masks.shape[0] im = skimage.img_as_ubyte(im) bboxes = bboxes_from_masks(masks) imcrop_batch = np.zeros((num_mask, crop_size, crop_size, 3), dtype=np.float32) for n_mask in range(num_mask): xmin, ymin, xmax, ymax = bboxes[n_mask] # crop and resize im_masked = im.copy() mask = masks[n_mask, ..., np.newaxis] im_masked *= mask im_masked += image_mean.astype(np.uint8) * (1 - mask) imcrop = im_masked[ymin:ymax+1, xmin:xmax+1, :] imcrop_batch[n_mask, ...] = skimage.img_as_ubyte(skimage.transform.resize(imcrop, [224, 224])) imcrop_batch -= image_mean return imcrop_batch
rectify_bboxes
lib.rs
extern crate one_graph_gremlin; extern crate log; extern crate tokio_tungstenite; extern crate tokio; extern crate tungstenite; extern crate futures_util; extern crate serde_json; use futures_util::{ SinkExt, StreamExt, }; use tungstenite::Message; use std::sync::RwLock; use std::sync::Arc; use log::*; use std::net::SocketAddr; use tokio::net::{TcpListener, TcpStream}; use tokio_tungstenite::{accept_async, tungstenite::Error}; use simple_logger::SimpleLogger; use serde_json::Value; use std::result::Result; use self::json_gremlin_request_handler::*; mod result; mod json_gremlin_request_handler; use one_graph_gremlin::gremlin_engine::steps::gremlin_state::GremlinStateError; use self::result::ServerError; use one_graph_core::model::init::InitContext; use one_graph_gremlin::gremlin_engine::GremlinDatabaseEngine; async fn
<'a>(peer: SocketAddr, graph_engine: Arc<RwLock<GremlinDatabaseEngine<'a>>>, stream: TcpStream) { if let Err(e) = handle_connection(peer, graph_engine, stream).await { match e { ServerError::WebsocketError(te) => match te { Error::ConnectionClosed | Error::Protocol(_) | Error::Utf8 => (), err => error!("Error processing connection: {}", err), }, ServerError::ParsingError(err_msg) => error!("Parsing error: {}", err_msg), ServerError::HeaderError => error!("wrong header"), ServerError::GremlinError => error!("parsing gremlin request"), ServerError::DatabaseError(db_err) => match db_err { one_graph_gremlin::gremlin_engine::DatabaseError::GremlinError(g_err) => match g_err { GremlinStateError::Invalid(step) => error!("invalid gremlin state {:?}", step), GremlinStateError::WrongContext(err) => error!("wrong gremlin context {}", err), }, one_graph_gremlin::gremlin_engine::DatabaseError::EngineError => error!("database engine error"), one_graph_gremlin::gremlin_engine::DatabaseError::ResponseError => error!("build gremlin response error"), one_graph_gremlin::gremlin_engine::DatabaseError::RequestError => error!("gremlin request error"), }, } } } async fn handle_connection<'a>(peer: SocketAddr, graph_engine: Arc<RwLock<GremlinDatabaseEngine<'a>>>, stream: TcpStream) -> Result<(), ServerError> { let ws_stream = accept_async(stream).await.expect("Failed to accept"); info!("New WebSocket connection: {}", peer); let (mut ws_sender, mut ws_receiver) = ws_stream.split(); let mut msg_fut = ws_receiver.next(); loop { match msg_fut.await { Some(msg) => { let msg = msg.map_err(ServerError::WebsocketError)?; if msg.is_binary() { let text_msg = msg.to_text().map_err(ServerError::WebsocketError)?; let json_msg = text_msg.strip_prefix("!application/vnd.gremlin-v3.0+json").ok_or(ServerError::HeaderError)?; let v: Value = serde_json::from_str(json_msg).map_err(|err| ServerError::ParsingError(err.to_string()))?; let gremlin_reply = handle_gremlin_json_request(graph_engine.clone(), 
&v).map_err(|err| ServerError::DatabaseError(err))?; let res_msg = serde_json::to_string(&gremlin_reply).map_err(|err| ServerError::ParsingError(err.to_string()))?; let mut with_prefix = String::from("application/vnd.gremlin-v3.0+json"); with_prefix.push_str(&res_msg); debug!("response msg: {}", res_msg); let response = Message::Text(res_msg); ws_sender.send(response).await.map_err(ServerError::WebsocketError)?; } else if msg.is_close() { break; } msg_fut = ws_receiver.next(); // Receive next WebSocket message. } None => break, // WebSocket stream terminated. } } Ok(()) } pub async fn run_server(addr: &str, conf: InitContext<'static>) { SimpleLogger::new().init().unwrap(); let graph_engine = Arc::new(RwLock::new(GremlinDatabaseEngine::new(conf))); let listener = TcpListener::bind(&addr).await.expect("Can't listen"); info!("Listening on: {}", addr); while let Ok((stream, _)) = listener.accept().await { let peer = stream.peer_addr().expect("connected streams should have a peer address"); info!("Peer address: {}", peer); tokio::spawn(accept_connection(peer, graph_engine.clone(), stream)); } }
accept_connection
sd_PK_test.go
package sd_PK import ( "testing" "time" "github.com/DeineAgenturUG/locales" "github.com/DeineAgenturUG/locales/currency" ) func TestLocale(t *testing.T) { trans := New() expected := "sd_PK" if trans.Locale() != expected { t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale()) } } func TestPluralsRange(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsRange() // expected := 1 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsOrdinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleTwo, // }, // { // expected: locales.PluralRuleFew, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsOrdinal() // expected := 4 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsCardinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsCardinal() // expected := 2 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if 
r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestRangePlurals(t *testing.T) { trans := New() tests := []struct { num1 float64 v1 uint64 num2 float64 v2 uint64 expected locales.PluralRule }{ // { // num1: 1, // v1: 1, // num2: 2, // v2: 2, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.RangePluralRule(tt.num1, tt.v1, tt.num2, tt.v2) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestOrdinalPlurals(t *testing.T) { trans := New() tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 2, // v: 0, // expected: locales.PluralRuleTwo, // }, // { // num: 3, // v: 0, // expected: locales.PluralRuleFew, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.OrdinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestCardinalPlurals(t *testing.T) { trans := New() tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.CardinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestDaysAbbreviated(t *testing.T) { trans := New() days := trans.WeekdaysAbbreviated() for i, day := range days { s := trans.WeekdayAbbreviated(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sun", // }, // { // idx: 1, // expected: "Mon", // }, // { // idx: 2, // expected: "Tue", // }, // { // idx: 3, // expected: "Wed", // }, // { // idx: 4, // expected: "Thu", // }, // { // idx: 5, // 
expected: "Fri", // }, // { // idx: 6, // expected: "Sat", // }, } for _, tt := range tests { s := trans.WeekdayAbbreviated(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysNarrow(t *testing.T) { trans := New() days := trans.WeekdaysNarrow() for i, day := range days { s := trans.WeekdayNarrow(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", string(day), s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "S", // }, // { // idx: 1, // expected: "M", // }, // { // idx: 2, // expected: "T", // }, // { // idx: 3, // expected: "W", // }, // { // idx: 4, // expected: "T", // }, // { // idx: 5, // expected: "F", // }, // { // idx: 6, // expected: "S", // }, } for _, tt := range tests { s := trans.WeekdayNarrow(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysShort(t *testing.T)
func TestDaysWide(t *testing.T) { trans := New() days := trans.WeekdaysWide() for i, day := range days { s := trans.WeekdayWide(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sunday", // }, // { // idx: 1, // expected: "Monday", // }, // { // idx: 2, // expected: "Tuesday", // }, // { // idx: 3, // expected: "Wednesday", // }, // { // idx: 4, // expected: "Thursday", // }, // { // idx: 5, // expected: "Friday", // }, // { // idx: 6, // expected: "Saturday", // }, } for _, tt := range tests { s := trans.WeekdayWide(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsAbbreviated(t *testing.T) { trans := New() months := trans.MonthsAbbreviated() for i, month := range months { s := trans.MonthAbbreviated(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "Jan", // }, // { // idx: 2, // expected: "Feb", // }, // { // idx: 3, // expected: "Mar", // }, // { // idx: 4, // expected: "Apr", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "Jun", // }, // { // idx: 7, // expected: "Jul", // }, // { // idx: 8, // expected: "Aug", // }, // { // idx: 9, // expected: "Sep", // }, // { // idx: 10, // expected: "Oct", // }, // { // idx: 11, // expected: "Nov", // }, // { // idx: 12, // expected: "Dec", // }, } for _, tt := range tests { s := trans.MonthAbbreviated(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsNarrow(t *testing.T) { trans := New() months := trans.MonthsNarrow() for i, month := range months { s := trans.MonthNarrow(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "J", // }, // { // idx: 
2, // expected: "F", // }, // { // idx: 3, // expected: "M", // }, // { // idx: 4, // expected: "A", // }, // { // idx: 5, // expected: "M", // }, // { // idx: 6, // expected: "J", // }, // { // idx: 7, // expected: "J", // }, // { // idx: 8, // expected: "A", // }, // { // idx: 9, // expected: "S", // }, // { // idx: 10, // expected: "O", // }, // { // idx: 11, // expected: "N", // }, // { // idx: 12, // expected: "D", // }, } for _, tt := range tests { s := trans.MonthNarrow(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsWide(t *testing.T) { trans := New() months := trans.MonthsWide() for i, month := range months { s := trans.MonthWide(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "January", // }, // { // idx: 2, // expected: "February", // }, // { // idx: 3, // expected: "March", // }, // { // idx: 4, // expected: "April", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "June", // }, // { // idx: 7, // expected: "July", // }, // { // idx: 8, // expected: "August", // }, // { // idx: 9, // expected: "September", // }, // { // idx: 10, // expected: "October", // }, // { // idx: 11, // expected: "November", // }, // { // idx: 12, // expected: "December", // }, } for _, tt := range tests { s := string(trans.MonthWide(time.Month(tt.idx))) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeFull(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } // fixed := time.FixedZone("OTHER", -4) tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am Eastern Standard Time", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, fixed), // expected: "8:05:01 pm OTHER", // }, 
} trans := New() for _, tt := range tests { s := trans.FmtTimeFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeLong(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am EST", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, loc), // expected: "8:05:01 pm EST", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeMedium(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05:01 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05:01 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateFull(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Wednesday, February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateLong(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: 
time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateMedium(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Feb 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/16", // }, // { // t: time.Date(-500, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/500", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtNumber(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 1123456.5643, // v: 2, // expected: "1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // expected: "1,123,456.6", // }, // { // num: 221123456.5643, // v: 3, // expected: "221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: 0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtNumber(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtCurrency(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { 
// num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "-$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "-CAD 221,123,456.564", // }, // { // num: 0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtCurrency(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtAccounting(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "($221,123,456.564)", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "(CAD 221,123,456.564)", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtAccounting(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtPercent(t 
*testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 15, // v: 0, // expected: "15%", // }, // { // num: 15, // v: 2, // expected: "15.00%", // }, // { // num: 434.45, // v: 0, // expected: "434%", // }, // { // num: 34.4, // v: 2, // expected: "34.40%", // }, // { // num: -34, // v: 0, // expected: "-34%", // }, } trans := New() for _, tt := range tests { s := trans.FmtPercent(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
{ trans := New() days := trans.WeekdaysShort() for i, day := range days { s := trans.WeekdayShort(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Su", // }, // { // idx: 1, // expected: "Mo", // }, // { // idx: 2, // expected: "Tu", // }, // { // idx: 3, // expected: "We", // }, // { // idx: 4, // expected: "Th", // }, // { // idx: 5, // expected: "Fr", // }, // { // idx: 6, // expected: "Sa", // }, } for _, tt := range tests { s := trans.WeekdayShort(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
error.rs
use combine::easy::Errors; use tokenizer::Token; use position::Pos; pub type InternalError<'a> = Errors<Token<'a>, Token<'a>, Pos>; /// Error parsing schema /// /// This structure is opaque for forward compatibility. We are exploring a /// way to improve both error message and API. #[derive(Fail, Debug)] #[fail(display="schema parse error: {}", _0)] pub struct ParseError(String); impl<'a> From<InternalError<'a>> for ParseError { fn from(e: InternalError<'a>) -> ParseError
}
{ ParseError(format!("{}", e)) }
error.rs
use std::fmt; /// The error used when managing connections with `deadpool`. #[derive(Debug)] pub enum Error { /// An error occurred establishing the connection ConnectionError(diesel::ConnectionError),
QueryError(diesel::result::Error), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::ConnectionError(ref e) => e.fmt(f), Error::QueryError(ref e) => e.fmt(f), } } } impl ::std::error::Error for Error {}
/// An error occurred pinging the database
peephole.py
import chainer from chainer.backends import cuda from chainer.functions.activation import sigmoid from chainer.functions.activation import tanh from chainer.functions.array import reshape from chainer.functions.array import split_axis from chainer import link from chainer.links.connection import linear from chainer import variable class StatefulPeepholeLSTM(link.Chain): """Fully-connected LSTM layer with peephole connections. This is a fully-connected LSTM layer with peephole connections as a chain. Unlike the :class:`~chainer.links.LSTM` link, this chain holds ``peep_i``, ``peep_f`` and ``peep_o`` as child links besides ``upward`` and ``lateral``. Given a input vector :math:`x`, Peephole returns the next hidden vector :math:`h'` defined as .. math:: a &=& \\tanh(upward x + lateral h), \\\\ i &=& \\sigma(upward x + lateral h + peep_i c), \\\\ f &=& \\sigma(upward x + lateral h + peep_f c), \\\\ c' &=& a \\odot i + f \\odot c, \\\\ o &=& \\sigma(upward x + lateral h + peep_o c'), \\\\ h' &=& o \\tanh(c'), where :math:`\\sigma` is the sigmoid function, :math:`\\odot` is the element-wise product, :math:`c` is the current cell state, :math:`c'` is the next cell state and :math:`h` is the current hidden vector. Args: in_size(int): Dimension of the input vector :math:`x`. out_size(int): Dimension of the hidden vector :math:`h`. Attributes: upward (~chainer.links.Linear): Linear layer of upward connections. lateral (~chainer.links.Linear): Linear layer of lateral connections. peep_i (~chainer.links.Linear): Linear layer of peephole connections to the input gate. peep_f (~chainer.links.Linear): Linear layer of peephole connections to the forget gate. peep_o (~chainer.links.Linear): Linear layer of peephole connections to the output gate. c (~chainer.Variable): Cell states of LSTM units. h (~chainer.Variable): Output at the current time step. 
""" def __init__(self, in_size, out_size): super(StatefulPeepholeLSTM, self).__init__() self.state_size = out_size self.reset_state() with self.init_scope(): self.upward = linear.Linear(in_size, 4 * out_size) self.lateral = linear.Linear(out_size, 4 * out_size, nobias=True) self.peep_i = linear.Linear(out_size, out_size, nobias=True) self.peep_f = linear.Linear(out_size, out_size, nobias=True) self.peep_o = linear.Linear(out_size, out_size, nobias=True) def _to_device(self, device, skip_between_cupy_devices=False): # Overrides Link._to_device # TODO(niboshi): Avoid forcing concrete links to override _to_device device = chainer.get_device(device) super(StatefulPeepholeLSTM, self)._to_device( device, skip_between_cupy_devices=skip_between_cupy_devices) if self.c is not None: if not (skip_between_cupy_devices and device.xp is cuda.cupy and isinstance(self.c, cuda.ndarray)): self.c.to_device(device) if self.h is not None: if not (skip_between_cupy_devices and device.xp is cuda.cupy and isinstance(self.h, cuda.ndarray)):
return self def reset_state(self): """Resets the internal states. It sets ``None`` to the :attr:`c` and :attr:`h` attributes. """ self.c = self.h = None def forward(self, x): """Updates the internal state and returns the LSTM outputs. Args: x (~chainer.Variable): A new batch from the input sequence. Returns: ~chainer.Variable: Outputs of updated LSTM units. """ lstm_in = self.upward(x) if self.h is not None: lstm_in += self.lateral(self.h) if self.c is None: xp = self.xp with chainer.using_device(self.device): self.c = variable.Variable( xp.zeros((len(x), self.state_size), dtype=x.dtype)) lstm_in = reshape.reshape( lstm_in, (len(lstm_in), lstm_in.shape[1] // 4, 4)) a, i, f, o = split_axis.split_axis(lstm_in, 4, 2) a = reshape.reshape(a, a.shape[:2]) i = reshape.reshape(i, i.shape[:2]) f = reshape.reshape(f, f.shape[:2]) o = reshape.reshape(o, o.shape[:2]) peep_in_i = self.peep_i(self.c) peep_in_f = self.peep_f(self.c) a = tanh.tanh(a) i = sigmoid.sigmoid(i + peep_in_i) f = sigmoid.sigmoid(f + peep_in_f) self.c = a * i + f * self.c peep_in_o = self.peep_o(self.c) o = sigmoid.sigmoid(o + peep_in_o) self.h = o * tanh.tanh(self.c) return self.h
self.h.to_device(device)
checks.py
from discord.ext import commands ''' Copyright (c) 2020 nizcomix https://github.com/niztg/CyberTron5000 under the terms of the MIT LICENSE ''' def check_admin_or_owner(): def predicate(ctx): if ctx.message.author.id == 670564722218762240: return True elif ctx.message.author.permissions_in(channel=ctx.message.channel).administrator: return True else: return False return commands.check(predicate) def check_mod_or_owner(): def predicate(ctx): if ctx.message.author.id == 670564722218762240: return True elif ctx.message.author.permissions_in(channel=ctx.message.channel).manage_messages: return True else: return False return commands.check(predicate) def check_mod_server():
def predicate(ctx): if ctx.message.author.id == 670564722218762240: return True elif ctx.message.guild.id == 292932955653931009: return True else: return False return commands.check(predicate)
lazy.py
from _dependencies.graph import _Graph class _LazyGraph: def __init__(self, attrname, namespace): self.attrname = attrname self.namespace = namespace def __get__(self, instance, owner):
graph = _Graph() for base in reversed(owner.__bases__): graph.update(base.__dependencies__) for name, dependency in self.namespace.items(): graph.assign(name, dependency) type.__setattr__(owner, self.attrname, graph) return graph
error.rs
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT use std::{io, num, path}; use thiserror::Error as DeriveError; /// Errors returned by the bundler. #[derive(Debug, DeriveError)] #[non_exhaustive] pub enum Error { /// Bundler error. #[error("{0}")] BundlerError(#[from] anyhow::Error), /// Failed to use glob pattern. #[error("`{0}`")] GlobError(#[from] glob::GlobError), /// Invalid glob pattern. #[error("`{0}`")] GlobPatternError(#[from] glob::PatternError), /// I/O error. #[error("`{0}`")] IoError(#[from] io::Error), /// Image error. #[error("`{0}`")] ImageError(#[from] image::ImageError), /// TOML error. #[error("`{0}`")] TomlError(#[from] toml::de::Error), /// Error walking directory. #[error("`{0}`")] WalkdirError(#[from] walkdir::Error), /// Strip prefix error. #[error("`{0}`")] StripError(#[from] path::StripPrefixError), /// Number parse error. #[error("`{0}`")] ConvertError(#[from] num::TryFromIntError), /// Zip error. #[error("`{0}`")] ZipError(#[from] zip::result::ZipError),
/// Handlebars template error. #[error("`{0}`")] HandleBarsError(#[from] handlebars::RenderError), /// JSON error. #[error("`{0}`")] JsonError(#[from] serde_json::error::Error), /// Regex error. #[error("`{0}`")] RegexError(#[from] regex::Error), /// Failed to perform HTTP request. #[cfg(windows)] #[error("`{0}`")] HttpError(#[from] attohttpc::Error), /// Failed to validate downloaded file hash. #[error("hash mismatch of downloaded file")] HashError, /// Unsupported architecture. #[error("Architecture Error: `{0}`")] ArchError(String), /// Couldn't find icons. #[error("Could not find Icon paths. Please make sure they exist in the tauri config JSON file")] IconPathError, /// Error on path util operation. #[error("Path Error:`{0}`")] PathUtilError(String), /// Error on shell script. #[error("Shell Scripting Error:`{0}`")] ShellScriptError(String), /// Generic error. #[error("`{0}`")] GenericError(String), /// No bundled project found for the updater. #[error("Unable to find a bundled project for the updater")] UnableToFindProject, /// String is not UTF-8. #[error("string is not UTF-8")] Utf8(#[from] std::str::Utf8Error), /// Windows SignTool not found. #[error("SignTool not found")] SignToolNotFound, /// Failed to open Windows registry. #[error("failed to open registry {0}")] OpenRegistry(String), /// Failed to get registry value. #[error("failed to get {0} value on registry")] GetRegistryValue(String), /// Unsupported OS bitness. #[error("unsupported OS bitness")] UnsupportedBitness, /// Failed to sign application. #[error("failed to sign app: {0}")] Sign(String), } /// Convenient type alias of Result type. pub type Result<T> = anyhow::Result<T, Error>;
/// Hex error. #[cfg(target_os = "windows")] #[error("`{0}`")] HexError(#[from] hex::FromHexError),
index.js
var plugins = require('electron-plugins'), ipc = require('electron').ipcMain; document.addEventListener('DOMContentLoaded', function () { var context = { document: document } plugins.load(context, function (err, loaded) { if(err) return console.error(err) console.log('Plugins loaded successfully.') }); var electronScreen = require('screen'); var webFrame = require('web-frame'); var size = electronScreen.getPrimaryDisplay().workAreaSize; function
(number) { return Math.log(number) / Math.log(2); } webFrame.setZoomFactor(Math.floor(log2((size.width*size.height)/(800*600)))); }) ipc.on('update-available', function () { console.log('there is an update available for download') })
log2
utils.rs
#[macro_export] macro_rules! impl_from_str { ($fn:path => $ty:ty) => { impl ::std::str::FromStr for $ty { type Err = ::anyhow::Error; fn from_str(input: &str) -> ::std::result::Result<Self, Self::Err> { let (remainder, output) = $fn(input).map_err(|error| { ::anyhow::anyhow!("could not parse `{}` using fn `{}`: {:#?}", stringify!($ty), stringify!($fn), error) })?; ::anyhow::ensure!( remainder.is_empty(), "could not parse entire input:\n\nRemainder: {:#?}\n\nOutput: {:#?}\n\nInput: {:#?}", remainder, output, input ); ::std::result::Result::Ok(output) } } }; } pub fn parsing_test<'a, T: std::fmt::Debug>( f: impl FnOnce(&'a str) -> nom::IResult<&'a str, T>, input: &'a str, ) -> T
{ let (remainder, result) = f(input).unwrap(); if !remainder.is_empty() { panic!( "incomplete parsing, got: {:#?}, remainder: {:?}", result, remainder ); } result }
http.go
package server import ( "net/http" "sniper/cmd/server/hook" "sniper/util/twirp" ) var hooks = twirp.ChainHooks( hook.NewRequestID(), hook.NewLog(), ) func
(mux *http.ServeMux, isInternal bool) { } func initInternalMux(mux *http.ServeMux) { }
initMux
p3_4.py
def lend_money(debts, person, amount): value = debts.get(person, 0) quantity = [amount] if value != 0: debts[person] = value + quantity else: debts[person] = quantity print(debts) def
(debts, person): value = debts.get(person, [0]) out = sum(value) return out def total_amount_owed(debts): my_money = 0 for values in debts.values(): for numbers in values: my_money += numbers return my_money
amount_owed_by
comp-2006.component.spec.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { async, ComponentFixture, TestBed } from '@angular/core/testing';
let component: Comp2006Component; let fixture: ComponentFixture<Comp2006Component>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ Comp2006Component ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(Comp2006Component); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); });
import { Comp2006Component } from './comp-2006.component'; describe('Comp2006Component', () => {
timesheets.py
# coding: utf-8 """ Xero Payroll AU This is the Xero Payroll API for orgs in Australia region. # noqa: E501 Contact: [email protected] Generated by: https://openapi-generator.tech """ import re # noqa: F401 from xero_python.models import BaseModel class Timesheets(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = {"timesheets": "list[Timesheet]"} attribute_map = {"timesheets": "Timesheets"} def __init__(self, timesheets=None): # noqa: E501 """Timesheets - a model defined in OpenAPI""" # noqa: E501 self._timesheets = None self.discriminator = None if timesheets is not None: self.timesheets = timesheets @property def timesheets(self): """Gets the timesheets of this Timesheets. # noqa: E501 :return: The timesheets of this Timesheets. # noqa: E501 :rtype: list[Timesheet] """ return self._timesheets @timesheets.setter def timesheets(self, timesheets): """Sets the timesheets of this Timesheets. :param timesheets: The timesheets of this Timesheets. # noqa: E501 :type: list[Timesheet] """ self._timesheets = timesheets
resnet_trans_head.py
import torch.nn as nn import torch class TransHeadNet(nn.Module): def __init__(self, in_channels, num_layers=3, num_filters=256, kernel_size=3, output_dim=3, freeze=False, with_bias_end=True): super(TransHeadNet, self).__init__() self.freeze = freeze if kernel_size == 3: padding = 1 elif kernel_size == 2: padding = 0 self.features = nn.ModuleList() for i in range(num_layers): _in_channels = in_channels if i == 0 else num_filters self.features.append(nn.Conv2d(_in_channels, num_filters, kernel_size=kernel_size, stride=1, padding=padding, bias=False)) self.features.append(nn.BatchNorm2d(num_filters)) self.features.append(nn.ReLU(inplace=True)) self.linears = nn.ModuleList() self.linears.append(nn.Linear(256 * 8 * 8, 4096)) self.linears.append(nn.ReLU(inplace=True)) self.linears.append(nn.Linear(4096, 4096)) self.linears.append(nn.ReLU(inplace=True)) self.linears.append(nn.Linear(4096, output_dim)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.normal_(m.weight, mean=0, std=0.001) if with_bias_end and (m.bias is not None): nn.init.constant_(m.bias, 0) elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) elif isinstance(m, nn.ConvTranspose2d): nn.init.normal_(m.weight, mean=0, std=0.001) elif isinstance(m, nn.Linear): nn.init.normal_(m.weight, mean=0, std=0.001) def forward(self, x):
if self.freeze: with torch.no_grad(): for i, l in enumerate(self.features): x = l(x) x = x.view(-1, 256*8*8) for i, l in enumerate(self.linears): x = l(x) return x.detach() else: for i, l in enumerate(self.features): x = l(x) x = x.view(-1, 256*8*8) for i, l in enumerate(self.linears): x = l(x) return x
auth.py
import google.oauth2.credentials import google_auth_oauthlib.flow as oauth_flow def fetch_new_creds(config): return oauth_flow.InstalledAppFlow.from_client_config( config, scopes=[ 'https://www.googleapis.com/auth/gmail.send', 'https://www.googleapis.com/auth/spreadsheets.readonly', ]).run_console() def serialize(credentials): return { k: getattr(credentials, k) for k in ('token', 'refresh_token', 'token_uri', 'client_id', 'client_secret', 'scopes') } def create_or_deserialize_creds(serialized_creds, config=None):
if serialized_creds: return google.oauth2.credentials.Credentials(**serialized_creds) return fetch_new_creds(config)
rk.py
import numpy as np from .base import OdeSolver, DenseOutput from .common import (validate_max_step, validate_tol, select_initial_step, norm, warn_extraneous, validate_first_step) from . import dop853_coefficients # Multiply steps computed from asymptotic behaviour of errors by this. SAFETY = 0.9 MIN_FACTOR = 0.2 # Minimum allowed decrease in a step size. MAX_FACTOR = 10 # Maximum allowed increase in a step size. def rk_step(fun, t, y, f, h, A, B, C, K): """Perform a single Runge-Kutta step. This function computes a prediction of an explicit Runge-Kutta method and also estimates the error of a less accurate method. Notation for Butcher tableau is as in [1]_. Parameters ---------- fun : callable Right-hand side of the system. t : float Current time. y : ndarray, shape (n,) Current state. f : ndarray, shape (n,) Current value of the derivative, i.e., ``fun(x, y)``. h : float Step to use. A : ndarray, shape (n_stages, n_stages) Coefficients for combining previous RK stages to compute the next stage. For explicit methods the coefficients at and above the main diagonal are zeros. B : ndarray, shape (n_stages,) Coefficients for combining RK stages for computing the final prediction. C : ndarray, shape (n_stages,) Coefficients for incrementing time for consecutive RK stages. The value for the first stage is always zero. K : ndarray, shape (n_stages + 1, n) Storage array for putting RK stages here. Stages are stored in rows. The last row is a linear combination of the previous rows with coefficients Returns ------- y_new : ndarray, shape (n,) Solution at t + h computed with a higher accuracy. f_new : ndarray, shape (n,) Derivative ``fun(t + h, y_new)``. References ---------- .. [1] E. Hairer, S. P. Norsett G. Wanner, "Solving Ordinary Differential Equations I: Nonstiff Problems", Sec. II.4. 
""" K[0] = f for s, (a, c) in enumerate(zip(A[1:], C[1:]), start=1): dy = np.dot(K[:s].T, a[:s]) * h K[s] = fun(t + c * h, y + dy) y_new = y + h * np.dot(K[:-1].T, B) f_new = fun(t + h, y_new) K[-1] = f_new return y_new, f_new class RungeKutta(OdeSolver): """Base class for explicit Runge-Kutta methods.""" C: np.ndarray = NotImplemented A: np.ndarray = NotImplemented B: np.ndarray = NotImplemented E: np.ndarray = NotImplemented P: np.ndarray = NotImplemented order: int = NotImplemented error_estimator_order: int = NotImplemented n_stages: int = NotImplemented def __init__(self, fun, t0, y0, t_bound, max_step=np.inf, rtol=1e-3, atol=1e-6, vectorized=False, first_step=None, **extraneous): warn_extraneous(extraneous) super(RungeKutta, self).__init__(fun, t0, y0, t_bound, vectorized, support_complex=True) self.y_old = None self.max_step = validate_max_step(max_step) self.rtol, self.atol = validate_tol(rtol, atol, self.n) self.f = self.fun(self.t, self.y) if first_step is None: self.h_abs = select_initial_step( self.fun, self.t, self.y, self.f, self.direction, self.error_estimator_order, self.rtol, self.atol) else: self.h_abs = validate_first_step(first_step, t0, t_bound) self.K = np.empty((self.n_stages + 1, self.n), dtype=self.y.dtype) self.error_exponent = -1 / (self.error_estimator_order + 1) self.h_previous = None def _estimate_error(self, K, h): return np.dot(K.T, self.E) * h def _estimate_error_norm(self, K, h, scale): return norm(self._estimate_error(K, h) / scale) def _step_impl(self): t = self.t y = self.y max_step = self.max_step rtol = self.rtol atol = self.atol min_step = 10 * np.abs(np.nextafter(t, self.direction * np.inf) - t) if self.h_abs > max_step: h_abs = max_step elif self.h_abs < min_step: h_abs = min_step else: h_abs = self.h_abs step_accepted = False step_rejected = False while not step_accepted: if h_abs < min_step: return False, self.TOO_SMALL_STEP h = h_abs * self.direction t_new = t + h if self.direction * (t_new - self.t_bound) > 0: t_new = 
self.t_bound h = t_new - t h_abs = np.abs(h) y_new, f_new = rk_step(self.fun, t, y, self.f, h, self.A, self.B, self.C, self.K) scale = atol + np.maximum(np.abs(y), np.abs(y_new)) * rtol error_norm = self._estimate_error_norm(self.K, h, scale) if error_norm < 1: if error_norm == 0: factor = MAX_FACTOR else: factor = min(MAX_FACTOR, SAFETY * error_norm ** self.error_exponent) if step_rejected: factor = min(1, factor) h_abs *= factor step_accepted = True else: h_abs *= max(MIN_FACTOR, SAFETY * error_norm ** self.error_exponent) step_rejected = True self.h_previous = h self.y_old = y self.t = t_new self.y = y_new self.h_abs = h_abs self.f = f_new return True, None def _dense_output_impl(self): Q = self.K.T.dot(self.P) return RkDenseOutput(self.t_old, self.t, self.y_old, Q) class RK23(RungeKutta): """Explicit Runge-Kutta method of order 3(2). This uses the Bogacki-Shampine pair of formulas [1]_. The error is controlled assuming accuracy of the second-order method, but steps are taken using the third-order accurate formula (local extrapolation is done). A cubic Hermite polynomial is used for the dense output. Can be applied in the complex domain. Parameters ----------
Right-hand side of the system. The calling signature is ``fun(t, y)``. Here ``t`` is a scalar and there are two options for ndarray ``y``. It can either have shape (n,), then ``fun`` must return array_like with shape (n,). Or alternatively it can have shape (n, k), then ``fun`` must return array_like with shape (n, k), i.e. each column corresponds to a single column in ``y``. The choice between the two options is determined by `vectorized` argument (see below). t0 : float Initial time. y0 : array_like, shape (n,) Initial state. t_bound : float Boundary time - the integration won't continue beyond it. It also determines the direction of the integration. first_step : float or None, optional Initial step size. Default is ``None`` which means that the algorithm should choose. max_step : float, optional Maximum allowed step size. Default is np.inf, i.e., the step size is not bounded and determined solely by the solver. rtol, atol : float and array_like, optional Relative and absolute tolerances. The solver keeps the local error estimates less than ``atol + rtol * abs(y)``. Here, `rtol` controls a relative accuracy (number of correct digits). But if a component of `y` is approximately below `atol`, the error only needs to fall within the same `atol` threshold, and the number of correct digits is not guaranteed. If components of y have different scales, it might be beneficial to set different `atol` values for different components by passing array_like with shape (n,) for `atol`. Default values are 1e-3 for `rtol` and 1e-6 for `atol`. vectorized : bool, optional Whether `fun` is implemented in a vectorized fashion. Default is False. Attributes ---------- n : int Number of equations. status : string Current status of the solver: 'running', 'finished' or 'failed'. t_bound : float Boundary time. direction : float Integration direction: +1 or -1. t : float Current time. y : ndarray Current state. t_old : float Previous time. None if no steps were made yet. 
step_size : float Size of the last successful step. None if no steps were made yet. nfev : int Number evaluations of the system's right-hand side. njev : int Number of evaluations of the Jacobian. Is always 0 for this solver as it does not use the Jacobian. nlu : int Number of LU decompositions. Is always 0 for this solver. References ---------- .. [1] P. Bogacki, L.F. Shampine, "A 3(2) Pair of Runge-Kutta Formulas", Appl. Math. Lett. Vol. 2, No. 4. pp. 321-325, 1989. """ order = 3 error_estimator_order = 2 n_stages = 3 C = np.array([0, 1/2, 3/4]) A = np.array([ [0, 0, 0], [1/2, 0, 0], [0, 3/4, 0] ]) B = np.array([2/9, 1/3, 4/9]) E = np.array([5/72, -1/12, -1/9, 1/8]) P = np.array([[1, -4 / 3, 5 / 9], [0, 1, -2/3], [0, 4/3, -8/9], [0, -1, 1]]) class RK45(RungeKutta): """Explicit Runge-Kutta method of order 5(4). This uses the Dormand-Prince pair of formulas [1]_. The error is controlled assuming accuracy of the fourth-order method accuracy, but steps are taken using the fifth-order accurate formula (local extrapolation is done). A quartic interpolation polynomial is used for the dense output [2]_. Can be applied in the complex domain. Parameters ---------- fun : callable Right-hand side of the system. The calling signature is ``fun(t, y)``. Here ``t`` is a scalar, and there are two options for the ndarray ``y``: It can either have shape (n,); then ``fun`` must return array_like with shape (n,). Alternatively it can have shape (n, k); then ``fun`` must return an array_like with shape (n, k), i.e., each column corresponds to a single column in ``y``. The choice between the two options is determined by `vectorized` argument (see below). t0 : float Initial time. y0 : array_like, shape (n,) Initial state. t_bound : float Boundary time - the integration won't continue beyond it. It also determines the direction of the integration. first_step : float or None, optional Initial step size. Default is ``None`` which means that the algorithm should choose. 
max_step : float, optional Maximum allowed step size. Default is np.inf, i.e., the step size is not bounded and determined solely by the solver. rtol, atol : float and array_like, optional Relative and absolute tolerances. The solver keeps the local error estimates less than ``atol + rtol * abs(y)``. Here `rtol` controls a relative accuracy (number of correct digits). But if a component of `y` is approximately below `atol`, the error only needs to fall within the same `atol` threshold, and the number of correct digits is not guaranteed. If components of y have different scales, it might be beneficial to set different `atol` values for different components by passing array_like with shape (n,) for `atol`. Default values are 1e-3 for `rtol` and 1e-6 for `atol`. vectorized : bool, optional Whether `fun` is implemented in a vectorized fashion. Default is False. Attributes ---------- n : int Number of equations. status : string Current status of the solver: 'running', 'finished' or 'failed'. t_bound : float Boundary time. direction : float Integration direction: +1 or -1. t : float Current time. y : ndarray Current state. t_old : float Previous time. None if no steps were made yet. step_size : float Size of the last successful step. None if no steps were made yet. nfev : int Number evaluations of the system's right-hand side. njev : int Number of evaluations of the Jacobian. Is always 0 for this solver as it does not use the Jacobian. nlu : int Number of LU decompositions. Is always 0 for this solver. References ---------- .. [1] J. R. Dormand, P. J. Prince, "A family of embedded Runge-Kutta formulae", Journal of Computational and Applied Mathematics, Vol. 6, No. 1, pp. 19-26, 1980. .. [2] L. W. Shampine, "Some Practical Runge-Kutta Formulas", Mathematics of Computation,, Vol. 46, No. 173, pp. 135-150, 1986. 
""" order = 5 error_estimator_order = 4 n_stages = 6 C = np.array([0, 1/5, 3/10, 4/5, 8/9, 1]) A = np.array([ [0, 0, 0, 0, 0], [1/5, 0, 0, 0, 0], [3/40, 9/40, 0, 0, 0], [44/45, -56/15, 32/9, 0, 0], [19372/6561, -25360/2187, 64448/6561, -212/729, 0], [9017/3168, -355/33, 46732/5247, 49/176, -5103/18656] ]) B = np.array([35/384, 0, 500/1113, 125/192, -2187/6784, 11/84]) E = np.array([-71/57600, 0, 71/16695, -71/1920, 17253/339200, -22/525, 1/40]) # Corresponds to the optimum value of c_6 from [2]_. P = np.array([ [1, -8048581381/2820520608, 8663915743/2820520608, -12715105075/11282082432], [0, 0, 0, 0], [0, 131558114200/32700410799, -68118460800/10900136933, 87487479700/32700410799], [0, -1754552775/470086768, 14199869525/1410260304, -10690763975/1880347072], [0, 127303824393/49829197408, -318862633887/49829197408, 701980252875 / 199316789632], [0, -282668133/205662961, 2019193451/616988883, -1453857185/822651844], [0, 40617522/29380423, -110615467/29380423, 69997945/29380423]]) class DOP853(RungeKutta): """Explicit Runge-Kutta method of order 8. This is a Python implementation of "DOP853" algorithm originally written in Fortran [1]_, [2]_. Note that this is not a literate translation, but the algorithmic core and coefficients are the same. Can be applied in the complex domain. Parameters ---------- fun : callable Right-hand side of the system. The calling signature is ``fun(t, y)``. Here, ``t`` is a scalar, and there are two options for the ndarray ``y``: It can either have shape (n,); then ``fun`` must return array_like with shape (n,). Alternatively it can have shape (n, k); then ``fun`` must return an array_like with shape (n, k), i.e. each column corresponds to a single column in ``y``. The choice between the two options is determined by `vectorized` argument (see below). t0 : float Initial time. y0 : array_like, shape (n,) Initial state. t_bound : float Boundary time - the integration won't continue beyond it. 
It also determines the direction of the integration. first_step : float or None, optional Initial step size. Default is ``None`` which means that the algorithm should choose. max_step : float, optional Maximum allowed step size. Default is np.inf, i.e. the step size is not bounded and determined solely by the solver. rtol, atol : float and array_like, optional Relative and absolute tolerances. The solver keeps the local error estimates less than ``atol + rtol * abs(y)``. Here `rtol` controls a relative accuracy (number of correct digits). But if a component of `y` is approximately below `atol`, the error only needs to fall within the same `atol` threshold, and the number of correct digits is not guaranteed. If components of y have different scales, it might be beneficial to set different `atol` values for different components by passing array_like with shape (n,) for `atol`. Default values are 1e-3 for `rtol` and 1e-6 for `atol`. vectorized : bool, optional Whether `fun` is implemented in a vectorized fashion. Default is False. Attributes ---------- n : int Number of equations. status : string Current status of the solver: 'running', 'finished' or 'failed'. t_bound : float Boundary time. direction : float Integration direction: +1 or -1. t : float Current time. y : ndarray Current state. t_old : float Previous time. None if no steps were made yet. step_size : float Size of the last successful step. None if no steps were made yet. nfev : int Number evaluations of the system's right-hand side. njev : int Number of evaluations of the Jacobian. Is always 0 for this solver as it does not use the Jacobian. nlu : int Number of LU decompositions. Is always 0 for this solver. References ---------- .. [1] E. Hairer, S. P. Norsett G. Wanner, "Solving Ordinary Differential Equations I: Nonstiff Problems", Sec. II. .. [2] `Page with original Fortran code of DOP853 <http://www.unige.ch/~hairer/software.html>`_. 
""" n_stages = dop853_coefficients.N_STAGES order = 8 error_estimator_order = 7 A = dop853_coefficients.A[:n_stages, :n_stages] B = dop853_coefficients.B C = dop853_coefficients.C[:n_stages] E3 = dop853_coefficients.E3 E5 = dop853_coefficients.E5 D = dop853_coefficients.D A_EXTRA = dop853_coefficients.A[n_stages + 1:] C_EXTRA = dop853_coefficients.C[n_stages + 1:] def __init__(self, fun, t0, y0, t_bound, max_step=np.inf, rtol=1e-3, atol=1e-6, vectorized=False, first_step=None, **extraneous): super(DOP853, self).__init__(fun, t0, y0, t_bound, max_step, rtol, atol, vectorized, first_step, **extraneous) self.K_extended = np.empty((dop853_coefficients.N_STAGES_EXTENDED, self.n), dtype=self.y.dtype) self.K = self.K_extended[:self.n_stages + 1] def _estimate_error(self, K, h): # Left for testing purposes. err5 = np.dot(K.T, self.E5) err3 = np.dot(K.T, self.E3) denom = np.hypot(np.abs(err5), 0.1 * np.abs(err3)) correction_factor = np.ones_like(err5) mask = denom > 0 correction_factor[mask] = np.abs(err5[mask]) / denom[mask] return h * err5 * correction_factor def _estimate_error_norm(self, K, h, scale): err5 = np.dot(K.T, self.E5) / scale err3 = np.dot(K.T, self.E3) / scale err5_norm_2 = np.linalg.norm(err5)**2 err3_norm_2 = np.linalg.norm(err3)**2 if err5_norm_2 == 0 and err3_norm_2 == 0: return 0.0 denom = err5_norm_2 + 0.01 * err3_norm_2 return np.abs(h) * err5_norm_2 / np.sqrt(denom * len(scale)) def _dense_output_impl(self): K = self.K_extended h = self.h_previous for s, (a, c) in enumerate(zip(self.A_EXTRA, self.C_EXTRA), start=self.n_stages + 1): dy = np.dot(K[:s].T, a[:s]) * h K[s] = self.fun(self.t_old + c * h, self.y_old + dy) F = np.empty((dop853_coefficients.INTERPOLATOR_POWER, self.n), dtype=self.y_old.dtype) f_old = K[0] delta_y = self.y - self.y_old F[0] = delta_y F[1] = h * f_old - delta_y F[2] = 2 * delta_y - h * (self.f + f_old) F[3:] = h * np.dot(self.D, K) return Dop853DenseOutput(self.t_old, self.t, self.y_old, F) class RkDenseOutput(DenseOutput): def 
__init__(self, t_old, t, y_old, Q): super(RkDenseOutput, self).__init__(t_old, t) self.h = t - t_old self.Q = Q self.order = Q.shape[1] - 1 self.y_old = y_old def _call_impl(self, t): x = (t - self.t_old) / self.h if t.ndim == 0: p = np.tile(x, self.order + 1) p = np.cumprod(p) else: p = np.tile(x, (self.order + 1, 1)) p = np.cumprod(p, axis=0) y = self.h * np.dot(self.Q, p) if y.ndim == 2: y += self.y_old[:, None] else: y += self.y_old return y class Dop853DenseOutput(DenseOutput): def __init__(self, t_old, t, y_old, F): super(Dop853DenseOutput, self).__init__(t_old, t) self.h = t - t_old self.F = F self.y_old = y_old def _call_impl(self, t): x = (t - self.t_old) / self.h if t.ndim == 0: y = np.zeros_like(self.y_old) else: x = x[:, None] y = np.zeros((len(x), len(self.y_old)), dtype=self.y_old.dtype) for i, f in enumerate(reversed(self.F)): y += f if i % 2 == 0: y *= x else: y *= 1 - x y += self.y_old return y.T
fun : callable
call.go
package introspect import ( "bosun.org/_third_party/github.com/godbus/dbus" "encoding/xml" "strings" ) // Call calls org.freedesktop.Introspectable.Introspect on a remote object // and returns the introspection data. func Call(o dbus.BusObject) (*Node, error) { var xmldata string var node Node err := o.Call("org.freedesktop.DBus.Introspectable.Introspect", 0).Store(&xmldata) if err != nil
err = xml.NewDecoder(strings.NewReader(xmldata)).Decode(&node) if err != nil { return nil, err } if node.Name == "" { node.Name = string(o.Path()) } return &node, nil }
{ return nil, err }
init.go
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** package streamanalytics import ( "fmt" "github.com/blang/semver" "github.com/pulumi/pulumi-azure/sdk/v4/go/azure" "github.com/pulumi/pulumi/sdk/v3/go/pulumi" ) type module struct { version semver.Version } func (m *module) Version() semver.Version { return m.version } func (m *module) Construct(ctx *pulumi.Context, name, typ, urn string) (r pulumi.Resource, err error) { switch typ { case "azure:streamanalytics/cluster:Cluster": r = &Cluster{} case "azure:streamanalytics/functionJavaScriptUDF:FunctionJavaScriptUDF": r = &FunctionJavaScriptUDF{} case "azure:streamanalytics/job:Job": r = &Job{} case "azure:streamanalytics/managedPrivateEndpoint:ManagedPrivateEndpoint": r = &ManagedPrivateEndpoint{} case "azure:streamanalytics/outputBlob:OutputBlob": r = &OutputBlob{} case "azure:streamanalytics/outputEventHub:OutputEventHub": r = &OutputEventHub{} case "azure:streamanalytics/outputMssql:OutputMssql": r = &OutputMssql{} case "azure:streamanalytics/outputServiceBusQueue:OutputServiceBusQueue": r = &OutputServiceBusQueue{} case "azure:streamanalytics/outputServicebusTopic:OutputServicebusTopic": r = &OutputServicebusTopic{} case "azure:streamanalytics/outputSynapse:OutputSynapse": r = &OutputSynapse{} case "azure:streamanalytics/outputTable:OutputTable": r = &OutputTable{} case "azure:streamanalytics/referenceInputBlob:ReferenceInputBlob": r = &ReferenceInputBlob{} case "azure:streamanalytics/referenceInputMssql:ReferenceInputMssql": r = &ReferenceInputMssql{} case "azure:streamanalytics/streamInputBlob:StreamInputBlob": r = &StreamInputBlob{} case "azure:streamanalytics/streamInputEventHub:StreamInputEventHub": r = &StreamInputEventHub{} case "azure:streamanalytics/streamInputIotHub:StreamInputIotHub": r = &StreamInputIotHub{} default: return nil, fmt.Errorf("unknown resource type: %s", typ) } err = 
ctx.RegisterResource(typ, name, nil, r, pulumi.URN_(urn)) return } func init() { version, err := azure.PkgVersion() if err != nil { fmt.Printf("failed to determine package version. defaulting to v1: %v\n", err) } pulumi.RegisterResourceModule( "azure", "streamanalytics/cluster", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/functionJavaScriptUDF", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/job", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/managedPrivateEndpoint", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputBlob", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputEventHub", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputMssql", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputServiceBusQueue", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputServicebusTopic", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputSynapse", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/outputTable",
"azure", "streamanalytics/referenceInputBlob", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/referenceInputMssql", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/streamInputBlob", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/streamInputEventHub", &module{version}, ) pulumi.RegisterResourceModule( "azure", "streamanalytics/streamInputIotHub", &module{version}, ) }
&module{version}, ) pulumi.RegisterResourceModule(
test.divide_20211204135240.js
import divide from "./../src/divide.js"; import chai from "./../node_modules/chai/chai.js"; import { assert } from "chai"; describe('Divide function', function(){ it('divide(6,4) should be equal to 1.5', function(){
it ("divide(4, 0) should be equal to one") });
assert.equal(divide(6,4), 1.5); });
main.rs
#![allow(clippy::cast_sign_loss)] #![allow(clippy::cast_precision_loss)] #![allow(clippy::cast_possible_truncation)] #![allow(clippy::cast_possible_wrap)] use std::sync::mpsc; use intcode; // This code just serves as an interface between the Intcode computer and the user - it makes // no effort to automatically play the game, sorry. Not interested in doing a bunch of dull // text parsing plus yet more maze solving! fn main()
{ let program = intcode::load_program("day25/input.txt").unwrap_or_else(|err| { println!("Could not load input file!\n{:?}", err); std::process::exit(1); }); let (in_send, in_recv) = mpsc::channel(); let (out_send, out_recv) = mpsc::channel(); let mut computer = intcode::ChannelIOComputer::new(&program, in_recv, out_send); std::thread::spawn(move || { computer.run(); }); loop { let mut display = String::new(); loop { let c = out_recv.recv().unwrap() as u8 as char; if c == '\n' { println!("{}", display); if display.eq("Command?") { break; } display.clear(); } else { display.push(c); } } let mut input = String::new(); std::io::stdin().read_line(&mut input).expect("Did not enter a string"); // Bloody Windows and its CRLF line endings if let Some('\n') = input.chars().next_back() { input.pop(); } if let Some('\r') = input.chars().next_back() { input.pop(); } input.push('\n'); for c in input.chars() { in_send.send(c as i64).unwrap(); } } }
Aggregator.ts
import Chain from './Chain'; import Node from './Node'; import Feed from './Feed'; import FeedSet from './FeedSet'; import { Types, FeedMessage, timestamp } from '@dotstats/common'; export default class Aggregator {
constructor() { setInterval(() => this.timeoutCheck(), 10000); } public addNode(node: Node) { let chain = this.getChain(node.chain); chain.addNode(node); this.feeds.broadcast(Feed.addedChain(chain)); } public addFeed(feed: Feed) { this.feeds.add(feed); feed.sendMessage(Feed.feedVersion()); for (const chain of this.chains.values()) { feed.sendMessage(Feed.addedChain(chain)); } feed.events.on('subscribe', (label: Types.ChainLabel) => { const chain = this.chains.get(label); if (chain) { feed.sendMessage(Feed.subscribedTo(label)); chain.addFeed(feed); } }); feed.events.on('unsubscribe', (label: Types.ChainLabel) => { const chain = this.chains.get(label); if (chain) { chain.removeFeed(feed); feed.sendMessage(Feed.unsubscribedFrom(label)); } }); } private getChain(label: Types.ChainLabel): Chain { const chain = this.chains.get(label); if (chain) { return chain; } else { const chain = new Chain(label); chain.events.on('disconnect', (count: number) => { if (count !== 0) { this.feeds.broadcast(Feed.addedChain(chain)); return; } chain.events.removeAllListeners(); this.chains.delete(chain.label); console.log(`Chain: ${label} lost all nodes`); this.feeds.broadcast(Feed.removedChain(label)); }); this.chains.set(label, chain); console.log(`New chain: ${label}`); this.feeds.broadcast(Feed.addedChain(chain)); return chain; } } private timeoutCheck() { const empty: Types.ChainLabel[] = []; const now = timestamp(); for (const chain of this.chains.values()) { chain.timeoutCheck(now); } for (const feed of this.feeds.values()) { feed.ping(); } } }
private readonly chains = new Map<Types.ChainLabel, Chain>(); private readonly feeds = new FeedSet();
derive_account_address_cmd.rs
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::cli_state::CliState; use crate::StarcoinOpt; use anyhow::Result; use scmd::{CommandAction, ExecContext}; use serde::Deserialize; use serde::Serialize; use starcoin_crypto::ed25519::Ed25519PublicKey; use starcoin_crypto::multi_ed25519::MultiEd25519PublicKey; use starcoin_crypto::ValidCryptoMaterialStringExt; use starcoin_types::account_address::AccountAddress; use starcoin_types::transaction; use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt(name = "derive-address")] pub struct DeriveAddressOpt { #[structopt(short = "p", required=true, min_values=1, max_values=32, parse(try_from_str=Ed25519PublicKey::from_encoded_string))] /// public key used to derive address.If multi public keys is provided, a mutli-sig account address is derived. public_key: Vec<Ed25519PublicKey>, #[structopt(short = "t", name = "threshold")] /// In multi-sig case, a threshold is needed, default to the num of public keys. threshold: Option<u8>, } pub struct DeriveAddressCommand; impl CommandAction for DeriveAddressCommand { type State = CliState; type GlobalOpt = StarcoinOpt; type Opt = DeriveAddressOpt; type ReturnItem = DerivedAddressData; fn run( &self, ctx: &ExecContext<Self::State, Self::GlobalOpt, Self::Opt>, ) -> Result<Self::ReturnItem> { let opt = ctx.opt(); let _client = ctx.state().client(); anyhow::ensure!( !opt.public_key.is_empty(), "at least one public key is provided" ); let auth_key = if opt.public_key.len() == 1
else { let threshold = opt.threshold.unwrap_or(opt.public_key.len() as u8); let multi_public_key = { // sort the public key to make account address derivation stable. let mut pubkeys = opt.public_key.clone(); pubkeys.sort_by_key(|k| k.to_bytes()); MultiEd25519PublicKey::new(pubkeys, threshold)? }; transaction::authenticator::AuthenticationKey::multi_ed25519(&multi_public_key) }; Ok(DerivedAddressData { address: auth_key.derived_address(), auth_key_prefix: hex::encode(auth_key.prefix().to_vec()), auth_key: hex::encode(auth_key.to_vec()), }) } } #[derive(Debug, Clone, Hash, Serialize, Deserialize)] pub struct DerivedAddressData { pub address: AccountAddress, pub auth_key: String, /// hex encoded pub auth_key_prefix: String, }
{ transaction::authenticator::AuthenticationKey::ed25519(opt.public_key.first().unwrap()) }
merger_base.go
package mergers import ( "fmt" "io" "github.com/bmclab-git/v2ray-core/v5/common/cmdarg" "github.com/bmclab-git/v2ray-core/v5/infra/conf/merge" ) type jsonConverter func(v []byte) ([]byte, error) // makeMerger makes a merger who merge the format by converting it to JSON func makeMerger(name string, extensions []string, converter jsonConverter) *Merger { return &Merger{ Name: name, Extensions: extensions, Merge: makeToJSONMergeFunc(converter), } } // makeToJSONMergeFunc makes a merge func who merge the format by converting it to JSON func makeToJSONMergeFunc(converter func(v []byte) ([]byte, error)) MergeFunc {
if target == nil { panic("merge target is nil") } switch v := input.(type) { case string: err := loadFile(v, target, converter) if err != nil { return err } case []string: err := loadFiles(v, target, converter) if err != nil { return err } case cmdarg.Arg: err := loadFiles(v, target, converter) if err != nil { return err } case []byte: err := loadBytes(v, target, converter) if err != nil { return err } case io.Reader: err := loadReader(v, target, converter) if err != nil { return err } default: return newError("unknown merge input type") } return nil } } func loadFiles(files []string, target map[string]interface{}, converter func(v []byte) ([]byte, error)) error { for _, file := range files { err := loadFile(file, target, converter) if err != nil { return err } } return nil } func loadFile(file string, target map[string]interface{}, converter func(v []byte) ([]byte, error)) error { bs, err := cmdarg.LoadArgToBytes(file) if err != nil { return fmt.Errorf("fail to load %s: %s", file, err) } if converter != nil { bs, err = converter(bs) if err != nil { return fmt.Errorf("error convert to json '%s': %s", file, err) } } _, err = merge.ToMap(bs, target) return err } func loadReader(reader io.Reader, target map[string]interface{}, converter func(v []byte) ([]byte, error)) error { bs, err := io.ReadAll(reader) if err != nil { return err } return loadBytes(bs, target, converter) } func loadBytes(bs []byte, target map[string]interface{}, converter func(v []byte) ([]byte, error)) error { var err error if converter != nil { bs, err = converter(bs) if err != nil { return fmt.Errorf("fail to convert to json: %s", err) } } _, err = merge.ToMap(bs, target) return err }
return func(input interface{}, target map[string]interface{}) error {
default.layout.ts
import { Component, OnDestroy } from '@angular/core'; import { delay, withLatestFrom, takeWhile } from 'rxjs/operators'; import { NbMediaBreakpoint, NbMediaBreakpointsService, NbMenuItem, NbMenuService, NbSidebarService, NbThemeService, } from '@nebular/theme'; import { StateService } from '../../../@core/utils'; // TODO: move layouts into the framework @Component({ selector: 'ngx-default-layout', styleUrls: ['./default.layout.scss'], template: ` <nb-layout [center]="layout.id === 'center-column'" windowMode> <nb-layout-header fixed> <ngx-header [position]="sidebar.id === 'start' ? 'normal': 'inverse'"></ngx-header> </nb-layout-header> <nb-layout-column class="main-content"> <ng-content select="router-outlet"></ng-content> </nb-layout-column> </nb-layout> `, }) export class
implements OnDestroy { subMenu: NbMenuItem[] = [ { title: 'PAGE LEVEL MENU', group: true, }, { title: 'Buttons', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/buttons', }, { title: 'Grid', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/grid', }, { title: 'Icons', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/icons', }, { title: 'Modals', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/modals', }, { title: 'Typography', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/typography', }, { title: 'Animated Searches', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/search-fields', }, { title: 'Tabs', icon: 'ion ion-android-radio-button-off', link: '/pages/ui-features/tabs', }, ]; layout: any = {}; sidebar: any = {}; private alive = true; currentTheme: string; constructor(protected stateService: StateService, protected menuService: NbMenuService, protected themeService: NbThemeService, protected bpService: NbMediaBreakpointsService, protected sidebarService: NbSidebarService) { this.stateService.onLayoutState() .pipe(takeWhile(() => this.alive)) .subscribe((layout: string) => this.layout = layout); this.stateService.onSidebarState() .pipe(takeWhile(() => this.alive)) .subscribe((sidebar: string) => { this.sidebar = sidebar; }); const isBp = this.bpService.getByName('is'); this.menuService.onItemSelect() .pipe( takeWhile(() => this.alive), withLatestFrom(this.themeService.onMediaQueryChange()), delay(20), ) .subscribe(([item, [bpFrom, bpTo]]: [any, [NbMediaBreakpoint, NbMediaBreakpoint]]) => { if (bpTo.width <= isBp.width) { this.sidebarService.collapse('menu-sidebar'); } }); this.themeService.getJsTheme() .pipe(takeWhile(() => this.alive)) .subscribe(theme => { this.currentTheme = theme.name; }); } ngOnDestroy() { this.alive = false; } }
DefaultLayoutComponent
Profile.js
import React, { Component } from "react"; import { View, Text, StyleSheet, SafeAreaView, Platform, StatusBar, Image, Switch } from "react-native"; import { RFValue } from "react-native-responsive-fontsize"; import firebase from "firebase"; export default class Profile extends Component { constructor(props) { super(props); this.state = { isEnabled: false, light_theme: true, profile_image: "", name: "" }; } toggleSwitch() { const previous_state = this.state.isEnabled; const theme = !this.state.isEnabled ? "dark" : "light"; var updates = {}; updates[ "/users/" + firebase.auth().currentUser.uid + "/current_theme" ] = theme; firebase .database() .ref() .update(updates); this.setState({ isEnabled: !previous_state, light_theme: previous_state }); } componentDidMount() { this.fetchUser(); } async fetchUser() { let theme, name, image; await firebase .database() .ref("/users/" + firebase.auth().currentUser.uid) .on("value", function (snapshot) { theme = snapshot.val().current_theme; name = `${snapshot.val().first_name} ${snapshot.val().last_name}`; image = snapshot.val().profile_picture; }); this.setState({ light_theme: theme === "light" ? true : false, isEnabled: theme === "light" ? false : true, name: name, profile_image: image }); } render() { return ( <View style={ this.state.light_theme ? styles.containerLight : styles.container } > <SafeAreaView style={styles.droidSafeArea} /> <View style={styles.appTitle}> <View style={styles.appIcon}> <Image source={require("../assets/logo.png")} style={styles.iconImage} ></Image> </View> <View style={styles.appTitleTextContainer}> <Text style={ this.state.light_theme ? styles.appTitleTextLight : styles.appTitleText } > Spectagram </Text> </View> </View> <View style={styles.screenContainer}> <View style={styles.profileImageContainer}>
<Image source={{ uri: this.state.profile_image }} style={styles.profileImage} ></Image> <Text style={ this.state.light_theme ? styles.nameTextLight : styles.nameText } > {this.state.name} </Text> </View> <View style={styles.themeContainer}> <Text style={ this.state.light_theme ? styles.themeTextLight : styles.themeText } > Dark Theme </Text> <Switch style={{ transform: [{ scaleX: 1.3 }, { scaleY: 1.3 }] }} trackColor={{ false: "#767577", true: this.state.light_theme ? "#eee" : "white" }} thumbColor={this.state.isEnabled ? "#ee8249" : "#f4f3f4"} ios_backgroundColor="#3e3e3e" onValueChange={() => this.toggleSwitch()} value={this.state.isEnabled} /> </View> <View style={{ flex: 0.3 }} /> </View> <View style={{ flex: 0.08 }} /> </View> ); } } const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: "black" }, containerLight: { flex: 1, backgroundColor: "white" }, droidSafeArea: { marginTop: Platform.OS === "android" ? StatusBar.currentHeight : RFValue(35) }, appTitle: { flex: 0.07, flexDirection: "row" }, appIcon: { flex: 0.3, justifyContent: "center", alignItems: "center" }, iconImage: { width: "100%", height: "100%", resizeMode: "contain" }, appTitleTextContainer: { flex: 0.7, justifyContent: "center" }, appTitleText: { color: "white", fontSize: RFValue(28), }, appTitleTextLight: { color: "black", fontSize: RFValue(28), }, screenContainer: { flex: 0.85 }, profileImageContainer: { flex: 0.5, justifyContent: "center", alignItems: "center" }, profileImage: { width: RFValue(140), height: RFValue(140), borderRadius: RFValue(70) }, nameText: { color: "white", fontSize: RFValue(40), marginTop: RFValue(10) }, nameTextLight: { color: "black", fontSize: RFValue(40), marginTop: RFValue(10) }, themeContainer: { flex: 0.2, flexDirection: "row", justifyContent: "center", marginTop: RFValue(20) }, themeText: { color: "white", fontSize: RFValue(20), marginRight: RFValue(15) }, themeTextLight: { color: "black", fontSize: RFValue(20), marginRight: RFValue(15) } });
bookParser.js
const queryParse = function (req) { // Check if it is an empty object. // if( Object.keys(query).length > 0) { // console.log(query); // } const query = {}; // The request query string on JSON format!!! We can do casting on TypeScript if(req.query.genre) { query.genre = req.query.genre; } if(req.query.author) { query.author = req.query.author; } if(req.query.title) { query.title = req.query.title; } if(req.query.read) { query.read = req.query.read; } console.log(query); return query; }; module.exports = { queryParse
};
bootstrap-toggle.min.js
/*! ======================================================================== * Bootstrap Toggle: bootstrap-toggle.js v2.2.0 * http://www.bootstraptoggle.com * ======================================================================== * Copyright 2014 Min Hur, The New York Times Company * Licensed under MIT * ======================================================================== */ + function(a) { "use strict"; function b(b) { return this.each(function() { var d = a(this), e = d.data("bs.toggle2"), f = "object" == typeof b && b; e || d.data("bs.toggle2", e = new c(this, f)), "string" == typeof b && e[b] && e[b]() }) } var c = function(b, c) { this.$element = a(b), this.options = a.extend({}, this.defaults(), c), this.render() }; c.VERSION = "2.2.0", c.DEFAULTS = { on: "On", off: "Off", onstyle: "primary", offstyle: "default", size: "normal", style: "", width: null, height: null }, c.prototype.defaults = function() { return { on: this.$element.attr("data-on") || c.DEFAULTS.on, off: this.$element.attr("data-off") || c.DEFAULTS.off, onstyle: this.$element.attr("data-onstyle") || c.DEFAULTS.onstyle, offstyle: this.$element.attr("data-offstyle") || c.DEFAULTS.offstyle, size: this.$element.attr("data-size") || c.DEFAULTS.size, style: this.$element.attr("data-style") || c.DEFAULTS.style, width: this.$element.attr("data-width") || c.DEFAULTS.width, height: this.$element.attr("data-height") || c.DEFAULTS.height } }, c.prototype.render = function() { this._onstyle = "btn-" + this.options.onstyle, this._offstyle = "btn-" + this.options.offstyle; var b = "large" === this.options.size ? "btn-lg" : "small" === this.options.size ? "btn-sm" : "mini" === this.options.size ? 
"btn-xs" : "", c = a('<label class="btn">').html(this.options.on).addClass(this._onstyle + " " + b), d = a('<label class="btn">').html(this.options.off).addClass(this._offstyle + " " + b + " active"), e = a('<span class="toggle-handle btn btn-default">').addClass(b), f = a('<div class="toggle-group">').append(c, d, e), g = a('<div class="toggle2 btn" data-toggle="toggle2">').addClass(this.$element.prop("checked") ? this._onstyle : this._offstyle + " off").addClass(b).addClass(this.options.style); this.$element.wrap(g), a.extend(this, { $toggle: this.$element.parent(), $toggleOn: c, $toggleOff: d, $toggleGroup: f }), this.$toggle.append(f); var h = this.options.width || Math.max(c.outerWidth(), d.outerWidth()) + e.outerWidth() / 2, i = this.options.height || Math.max(c.outerHeight(), d.outerHeight()); c.addClass("toggle-on"), d.addClass("toggle-off"), this.$toggle.css({ width: h, height: i }), this.options.height && (c.css("line-height", c.height() + "px"), d.css("line-height", d.height() + "px")), this.update(!0), this.trigger(!0) }, c.prototype.toggle = function() { this.$element.prop("checked") ? this.off() : this.on() }, c.prototype.on = function(a) { return this.$element.prop("disabled") ? !1 : (this.$toggle.removeClass(this._offstyle + " off").addClass(this._onstyle), this.$element.prop("checked", !0), void(a || this.trigger())) }, c.prototype.off = function(a) { return this.$element.prop("disabled") ? !1 : (this.$toggle.removeClass(this._onstyle).addClass(this._offstyle + " off"), this.$element.prop("checked", !1), void(a || this.trigger())) }, c.prototype.enable = function() { this.$toggle.removeAttr("disabled"), this.$element.prop("disabled", !1) }, c.prototype.disable = function() {
}, c.prototype.trigger = function(b) { this.$element.off("change.bs.toggle2"), b || this.$element.change(), this.$element.on("change.bs.toggle2", a.proxy(function() { this.update() }, this)) }, c.prototype.destroy = function() { this.$element.off("change.bs.toggle2"), this.$toggleGroup.remove(), this.$element.removeData("bs.toggle2"), this.$element.unwrap() }; var d = a.fn.bootstrapToggle; a.fn.bootstrapToggle = b, a.fn.bootstrapToggle.Constructor = c, a.fn.toggle.noConflict = function() { return a.fn.bootstrapToggle = d, this }, a(function() { a("input[type=checkbox][data-toggle^=toggle2]").bootstrapToggle() }), a(document).on("click.bs.toggle", "div[data-toggle^=toggle]", function(b) { var c = a(this).find("input[type=checkbox]"); c.bootstrapToggle("toggle"), b.preventDefault() }) }(jQuery); //# sourceMappingURL=bootstrap-toggle.min.js.map
this.$toggle.attr("disabled", "disabled"), this.$element.prop("disabled", !0) }, c.prototype.update = function(a) { this.$element.prop("disabled") ? this.disable() : this.enable(), this.$element.prop("checked") ? this.on(a) : this.off(a)
meta.rs
use std::net::SocketAddr; use jsonrpc::futures::sync::mpsc; use jsonrpc::Metadata; /// Request context pub struct RequestContext { /// Peer Address pub peer_addr: SocketAddr, /// Peer Sender channel pub sender: mpsc::Sender<String>, } /// Metadata extractor (per session) pub trait MetaExtractor<M: Metadata> : Send + Sync { /// Extracts metadata from request context fn extract(&self, context: &RequestContext) -> M; } impl<M, F> MetaExtractor<M> for F where M: Metadata, F: Fn(&RequestContext) -> M + Send + Sync, { fn extract(&self, context: &RequestContext) -> M { (*self)(context) } } /// Noop-extractor pub struct
; impl<M: Metadata + Default> MetaExtractor<M> for NoopExtractor { fn extract(&self, _context: &RequestContext) -> M { M::default() } }
NoopExtractor
index.ts
// Copyright 2017-2019 @polkadot/util authors & contributors // This software may be modified and distributed under the terms // of the Apache-2.0 license. See the LICENSE file for details. /** * @summary Extensions to basic classes
*/ export { default as ExtError } from './error';
utils.go
/* Package util contains various utility methods and constants */ package util import (
"net/url" "regexp" "strings" "github.com/neflyte/configmap" "github.com/alewgbl/fdwctl/internal/logger" "github.com/alewgbl/fdwctl/internal/model" ) const ( // pgConnHost is the name of the Host connection string key pgConnHost = "host" // pgConnPort is the name of the Port connection string key pgConnPort = "port" // pgConnDBName is the name of the Database connection string key pgConnDBName = "dbname" // pgConnUser is the name of the User connection string key pgConnUser = "user" // pgConnSSLMode is the name of the SSLMode connection string key pgConnSSLMode = "sslmode" ) var ( // startsWithNumberRE is a regular expression to test if a string begins with a number startsWithNumberRE = regexp.MustCompile(`^[0-9].*$`) // pgConnectionStringRE is a regular expression that matches a PG-style connection string pgConnectionStringRE = regexp.MustCompile(`([\w]+)[ ]?=[ ]?([\w]+)`) // urlStringRE is a regular expression that matches a URL-style connection string urlStringRE = regexp.MustCompile(`^(postgres[q]?[l]?://)?([^:/\s]+)(:([^/]*))?(/\w+\.)*([^#?\s]+)(\?([^#]*))?(#(.*))?$`) // pgConnectionFields is a list of connection string fields that must exist for the string to be considered valid pgConnectionFields = []string{pgConnHost, pgConnPort, pgConnDBName, pgConnUser, pgConnSSLMode} ) // StringCoalesce returns the first string in the supplied arguments that is non-empty when trimmed. If there // is no such string, the empty string is returned. 
func StringCoalesce(args ...string) string { for _, str := range args { if strings.TrimSpace(str) != "" { return str } } return "" } // StartsWithNumber determines if a string starts with a number func StartsWithNumber(str string) bool { return startsWithNumberRE.MatchString(str) } // mapContainsKeys determines if the supplied map (haystack) contains all the supplied keys (needles) func mapContainsKeys(haystack configmap.ConfigMap, needles ...string) bool { found := 0 for _, needle := range needles { if haystack.Has(needle) { found++ } } return found == len(needles) } // connectionStringWithSecret returns a URL populated with a credential obtained using the supplied secret configuration func connectionStringWithSecret(connURL *url.URL, secret model.Secret) string { log := logger.Log(). WithField("function", "connectionStringWithSecret") secretValue, err := GetSecret(context.Background(), secret) if err != nil { log.Errorf("error getting secret value: %s; returning connection string as-is", err) log.Tracef("returning %s", connURL.String()) return connURL.String() } connURL.User = url.UserPassword(connURL.User.Username(), secretValue) return connURL.String() } // ResolveConnectionString returns a connection string populated with a credential obtained using the supplied secret configuration func ResolveConnectionString(connStr string, secret *model.Secret) string { var connURL *url.URL var err error log := logger.Log(). WithField("function", "ResolveConnectionString") if connStr == "" { return "" } /* There are two kinds of connection string: 1. URL-style (e.g. RFC-3986: postgres://user:password@host:port/db?options...) 2. PG-style (e.g. host=xxx port=yyy db=zzz...) */ // Do we have an URL-style string? if urlStringRE.MatchString(connStr) { connURL, err = url.Parse(connStr) if err != nil { log.Errorf("error parsing connection string as URL: %s", err) } else { log.Trace("got an URL-style string") } } // Do we have a PG-style string? 
if connURL == nil && pgConnectionStringRE.MatchString(connStr) { log.Trace("found PG-style string") connMap := configmap.New() matches := pgConnectionStringRE.FindAllStringSubmatch(connStr, -1) for _, match := range matches { log.Tracef("match: %#v", match) connMap.Set(match[1], match[2]) } // Now we can try to construct an URL-style string. First we see if there // is enough information to do so if mapContainsKeys(connMap, pgConnectionFields...) { log.Trace("connMap has enough keys") // Build the url.URL host string urlHost := fmt.Sprintf("%s:%s", connMap.GetString(pgConnHost), connMap.GetString(pgConnPort)) // Build the query string urlQuery := fmt.Sprintf("%s=%s", pgConnSSLMode, connMap.GetString(pgConnSSLMode)) // Create a new url.URL connURL = &url.URL{ Scheme: "postgres", User: url.User(connMap.GetString(pgConnUser)), Host: urlHost, Path: connMap.GetString(pgConnDBName), RawQuery: urlQuery, } } else { log.Debugf("map didn't contain all keys; map: %#v, keys: %#v", connMap, pgConnectionFields) } } // Handle secret if SecretIsDefined(*secret) { return connectionStringWithSecret(connURL, *secret) } return connURL.String() }
"context" "fmt"