file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---|
WmfImagePlugin.py | #
# The Python Imaging Library
# $Id$
#
# WMF stub codec
#
# history:
# 1996-12-14 fl Created
# 2004-02-22 fl Turned into a stub driver
# 2004-02-23 fl Added EMF support
#
# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
# WMF/EMF reference documentation:
# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf
# http://wvware.sourceforge.net/caolan/index.html
# http://wvware.sourceforge.net/caolan/ora-wmf.html
from __future__ import print_function
from . import Image, ImageFile
from ._binary import i16le as word, si16le as short, i32le as dword, si32le as _long
__version__ = "0.2"
_handler = None
if str != bytes:
long = int
def register_handler(handler):
"""
Install application-specific WMF image handler.
:param handler: Handler object.
"""
global _handler
_handler = handler
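# A minimal sketch of an application-specific handler (hypothetical names --
# ``render_wmf_to_rgb`` stands in for whatever renderer the application uses).
# A handler needs ``open`` and ``load`` methods, plus ``save`` if it should
# also handle writing:
#
#     class MyWmfHandler(object):
#         def open(self, im):
#             im.mode = "RGB"  # size and info were already set by _open()
#         def load(self, im):
#             im.fp.seek(0)
#             data = render_wmf_to_rgb(im.fp.read(), im.size)  # hypothetical
#             return Image.frombytes("RGB", im.size, data)
#
#     register_handler(MyWmfHandler())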
if hasattr(Image.core, "drawwmf"):
# install default handler (windows only)
class WmfHandler(object):
|
register_handler(WmfHandler())
#
# --------------------------------------------------------------------
# Read WMF file
def _accept(prefix):
return (
prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or
prefix[:4] == b"\x01\x00\x00\x00"
)
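# Note: b"\xd7\xcd\xc6\x9a" is the little-endian magic 0x9AC6CDD7 of the
# placeable WMF header, and a first dword of 1 together with b" EMF" at
# offset 40 identifies the EMF header record (EMR_HEADER).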
##
# Image plugin for Windows metafiles.
class WmfStubImageFile(ImageFile.StubImageFile):
format = "WMF"
format_description = "Windows Metafile"
def _open(self):
# check placeable header
s = self.fp.read(80)
if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00":
# placeable windows metafile
# get units per inch
inch = word(s, 14)
# get bounding box
x0 = short(s, 6)
y0 = short(s, 8)
x1 = short(s, 10)
y1 = short(s, 12)
# normalize size to 72 dots per inch
size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch
self.info["wmf_bbox"] = x0, y0, x1, y1
self.info["dpi"] = 72
# print(self.mode, self.size, self.info)
# sanity check (standard metafile header)
if s[22:26] != b"\x01\x00\t\x00":
raise SyntaxError("Unsupported WMF file format")
elif dword(s) == 1 and s[40:44] == b" EMF":
# enhanced metafile
# get bounding box
x0 = _long(s, 8)
y0 = _long(s, 12)
x1 = _long(s, 16)
y1 = _long(s, 20)
# get frame (in 0.01 millimeter units)
frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36)
# normalize size to 72 dots per inch
size = x1 - x0, y1 - y0
# calculate dots per inch from bbox and frame
xdpi = 2540 * (x1 - x0) // (frame[2] - frame[0])
ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1])
self.info["wmf_bbox"] = x0, y0, x1, y1
if xdpi == ydpi:
self.info["dpi"] = xdpi
else:
self.info["dpi"] = xdpi, ydpi
else:
raise SyntaxError("Unsupported file format")
self.mode = "RGB"
self.size = size
loader = self._load()
if loader:
loader.open(self)
def _load(self):
return _handler
def _save(im, fp, filename):
if _handler is None or not hasattr(_handler, "save"):
raise IOError("WMF save handler not installed")
_handler.save(im, fp, filename)
#
# --------------------------------------------------------------------
# Registry stuff
Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept)
Image.register_save(WmfStubImageFile.format, _save)
Image.register_extension(WmfStubImageFile.format, ".wmf")
Image.register_extension(WmfStubImageFile.format, ".emf")
| def open(self, im):
im.mode = "RGB"
self.bbox = im.info["wmf_bbox"]
def load(self, im):
im.fp.seek(0) # rewind
return Image.frombytes(
"RGB", im.size,
Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
"raw", "BGR", (im.size[0]*3 + 3) & -4, -1
) |
lib.rs | #![no_std]
extern crate eng_wasm;
extern crate eng_wasm_derive;
use eng_wasm::*;
use eng_wasm_derive::pub_interface;
use eng_wasm_derive::eth_contract;
use eng_wasm::String;
#[eth_contract("ABI.json")]
struct | ;
#[pub_interface]
pub trait ContractInterface{
fn write_data(data: String, address: String);
fn read_data(address: String) -> String;
}
pub struct Contract;
impl ContractInterface for Contract {
fn write_data(data: String, address: String) {
let eth_contract = EthContract::new(&address);
eth_contract.writeData(data);
}
fn read_data(address: String) -> String {
let eth_contract = EthContract::new(&address);
eth_contract.readData();
eformat!("ABIs still in dev")
}
} | EthContract |
test_sampler.py | from dowel import logger
import numpy as np
from garage.sampler.utils import truncate_paths
from tests.fixtures.logger import NullOutput
class TestSampler:
def setup_method(self):
logger.add_output(NullOutput())
def teardown_method(self):
logger.remove_all()
def test_truncate_paths(self): | actions=np.zeros((100, 1)),
rewards=np.zeros(100),
env_infos=dict(),
agent_infos=dict(lala=np.zeros(100)),
),
dict(
observations=np.zeros((50, 1)),
actions=np.zeros((50, 1)),
rewards=np.zeros(50),
env_infos=dict(),
agent_infos=dict(lala=np.zeros(50)),
),
]
truncated = truncate_paths(paths, 130)
assert len(truncated) == 2
assert len(truncated[-1]['observations']) == 30
assert len(truncated[0]['observations']) == 100
# make sure not to change the original one
assert len(paths) == 2
assert len(paths[-1]['observations']) == 50 | paths = [
dict(
observations=np.zeros((100, 1)), |
new_client.py | __author__ = 'mnowotka'
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import requests
import requests_cache
from chembl_webresource_client.spore_client import Client, make_spore_function
from chembl_webresource_client.query_set import QuerySet
from chembl_webresource_client.query_set import Model
from chembl_webresource_client.settings import Settings
from easydict import EasyDict
#-----------------------------------------------------------------------------------------------------------------------
class NewClient(object): |
def client_from_url(url, base_url=None):
"""Builds a client from an url
:param url: the url you want to get the SPORE schema from
:param session: the :class:`request.Session` instance to use. Defaults to
the requests module itself.
"""
res = requests.get(url)
if not res.ok:
raise Exception('Error getting schema from url {0} with status {1} and msg {2}'.format(url, res.status_code, res.text))
schema = res.json()
if 'base_url' not in schema:
if base_url:
schema['base_url'] = base_url
else:
parsed_url = urlparse(url)
schema['base_url'] = parsed_url.scheme + '://' + parsed_url.netloc + '/'
if not schema['base_url'].endswith('/'):
schema['base_url'] += '/'
client = NewClient()
client.description = EasyDict(schema)
client.official = False # TODO: change
keys = client.description.methods.keys()
for method, definition in [(m,d) for (m,d) in client.description.methods.items() if
(m.startswith('POST_') or m.startswith('GET_')) and m.endswith('_detail')]:
searchable = False
if method.replace('dispatch_detail', 'get_search') in keys:
searchable = True
name = definition['resource_name']
collection_name = definition['collection_name']
formats = [format for format in definition['formats'] if format not in ('jsonp', 'html')]
default_format = definition['default_format'].split('/')[-1]
if not name:
continue
model = Model(name, collection_name, formats, searchable)
qs = QuerySet(model=model)
if default_format != 'xml':
qs.set_format(default_format)
setattr(client, name, qs)
return client
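# Usage sketch (illustrative URL and resource name; the attributes generated
# on the client depend entirely on the schema the server returns):
#
#     client = client_from_url('https://www.ebi.ac.uk/chembl/api/data/spore')
#     aspirin = client.molecule.filter(pref_name__iexact='aspirin')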
#-----------------------------------------------------------------------------------------------------------------------
new_client = client_from_url(Settings.Instance().NEW_CLIENT_URL + '/spore')
#----------------------------------------------------------------------------------------------------------------------- | pass
#----------------------------------------------------------------------------------------------------------------------- |
kandi.js | (function () {
// define variables
var canvas = document.getElementById('canvas');
var ctx = canvas.getContext('2d');
var player, score, stop, ticker;
var ground = [], water = [], enemies = [], environment = [];
// platform variables
var platformHeight, platformLength, gapLength;
var platformWidth = 32;
var platformBase = canvas.height - platformWidth; // bottom row of the game
var platformSpacer = 64;
/**
* Get a random integer within a range (inclusive)
* @param {integer} low - Lower bound
* @param {integer} high - Upper bound
*/
function rand(low, high) {
return Math.floor( Math.random() * (high - low + 1) + low );
}
/**
* Bound a number within a range
* @param {integer} num - Number to bound
* @param {integer} low - Lower bound
* @param {integer} high - Upper bound
*/
function bound(num, low, high) {
return Math.max( Math.min(num, high), low);
}
/**
* Asset pre-loader object. Loads all images
*/
var assetLoader = (function() {
// images dictionary
this.imgs = {
'bg' : 'imgs/bg.png',
'sky' : 'imgs/sky.png',
'backdrop' : 'imgs/backdrop.png',
'backdrop2' : 'imgs/backdrop_ground.png',
'grass' : 'imgs/grass.png',
'avatar_normal' : 'imgs/normal_walk.png',
'water' : 'imgs/water.png',
'grass1' : 'imgs/grassMid1.png',
'grass2' : 'imgs/grassMid2.png',
'bridge' : 'imgs/bridge.png',
'plant' : 'imgs/plant.png',
'bush1' : 'imgs/bush1.png',
'bush2' : 'imgs/bush2.png',
'cliff' : 'imgs/grassCliffRight.png',
'spikes' : 'imgs/spikes.png',
'box' : 'imgs/boxCoin.png',
'slime' : 'imgs/slime.png'
};
var assetsLoaded = 0; // how many assets have been loaded
var numImgs = Object.keys(this.imgs).length; // total number of image assets
this.totalAssets = numImgs; // total number of assets
/**
* Ensure all assets are loaded before using them
* @param {string} dic - Dictionary name ('imgs', 'sounds', 'fonts')
* @param {string} name - Asset name in the dictionary
*/
function assetLoaded(dic, name) {
// don't count assets that have already loaded
if (this[dic][name].status !== 'loading') {
return;
}
this[dic][name].status = 'loaded';
assetsLoaded++;
// finished callback
if (assetsLoaded === this.totalAssets && typeof this.finished === 'function') {
this.finished();
}
}
/**
* Create assets, set callback for asset loading, set asset source
*/
this.downloadAll = function() {
var _this = this;
var src;
// load images
for (var img in this.imgs) {
if (this.imgs.hasOwnProperty(img)) {
src = this.imgs[img];
// create a closure for event binding
(function(_this, img) {
_this.imgs[img] = new Image();
_this.imgs[img].status = 'loading';
_this.imgs[img].name = img;
_this.imgs[img].onload = function() { assetLoaded.call(_this, 'imgs', img) };
_this.imgs[img].src = src;
})(_this, img);
}
}
}
return {
imgs: this.imgs,
totalAssets: this.totalAssets,
downloadAll: this.downloadAll
};
})();
assetLoader.finished = function() {
startGame();
}
/**
* Creates a Spritesheet
* @param {string} path - Path to the image.
* @param {number} frameWidth - Width (in px) of each frame.
* @param {number} frameHeight - Height (in px) of each frame.
*/
function SpriteSheet(path, frameWidth, frameHeight) {
this.image = new Image();
this.frameWidth = frameWidth;
this.frameHeight = frameHeight;
// calculate the number of frames in a row after the image loads
var self = this;
this.image.onload = function() {
self.framesPerRow = Math.floor(self.image.width / self.frameWidth);
};
this.image.src = path;
}
/**
* Creates an animation from a spritesheet.
* @param {SpriteSheet} spritesheet - The spritesheet used to create the animation.
* @param {number} frameSpeed - Number of frames to wait before transitioning the animation.
* @param {number} startFrame - First frame of the animation sequence.
* @param {number} endFrame - Last frame of the animation sequence.
*/
function Animation(spritesheet, frameSpeed, startFrame, endFrame) {
var animationSequence = []; // array holding the order of the animation
var currentFrame = 0; // the current frame to draw
var counter = 0; // keep track of frame rate
// start and end range for frames
for (var frameNumber = startFrame; frameNumber <= endFrame; frameNumber++)
animationSequence.push(frameNumber);
/**
* Update the animation
*/
this.update = function() {
// update to the next frame if it is time
if (counter == (frameSpeed - 1))
currentFrame = (currentFrame + 1) % animationSequence.length;
// update the counter
counter = (counter + 1) % frameSpeed;
};
/**
* Draw the current frame
* @param {integer} x - X position to draw
* @param {integer} y - Y position to draw
*/
this.draw = function(x, y) {
// get the row and col of the frame
var row = Math.floor(animationSequence[currentFrame] / spritesheet.framesPerRow);
var col = Math.floor(animationSequence[currentFrame] % spritesheet.framesPerRow);
ctx.drawImage(
spritesheet.image,
col * spritesheet.frameWidth, row * spritesheet.frameHeight,
spritesheet.frameWidth, spritesheet.frameHeight,
x, y,
spritesheet.frameWidth, spritesheet.frameHeight);
};
}
/**
* Create a parallax background
*/
var background = (function() {
var sky = {};
var backdrop = {};
var backdrop2 = {};
/**
* Draw the backgrounds to the screen at different speeds
*/
this.draw = function() {
ctx.drawImage(assetLoader.imgs.bg, 0, 0);
// Pan background
sky.x -= sky.speed;
backdrop.x -= backdrop.speed;
backdrop2.x -= backdrop2.speed;
// draw images side by side to loop
ctx.drawImage(assetLoader.imgs.sky, sky.x, sky.y);
ctx.drawImage(assetLoader.imgs.sky, sky.x + canvas.width, sky.y);
ctx.drawImage(assetLoader.imgs.backdrop, backdrop.x, backdrop.y);
ctx.drawImage(assetLoader.imgs.backdrop, backdrop.x + canvas.width, backdrop.y);
ctx.drawImage(assetLoader.imgs.backdrop2, backdrop2.x, backdrop2.y);
ctx.drawImage(assetLoader.imgs.backdrop2, backdrop2.x + canvas.width, backdrop2.y);
// If the image scrolled off the screen, reset
if (sky.x + assetLoader.imgs.sky.width <= 0)
sky.x = 0;
if (backdrop.x + assetLoader.imgs.backdrop.width <= 0)
backdrop.x = 0;
if (backdrop2.x + assetLoader.imgs.backdrop2.width <= 0)
backdrop2.x = 0;
};
/**
* Reset background to zero
*/
this.reset = function() {
sky.x = 0;
sky.y = 0;
sky.speed = 0.2;
backdrop.x = 0;
backdrop.y = 0;
backdrop.speed = 0.4;
backdrop2.x = 0;
backdrop2.y = 0;
backdrop2.speed = 0.6;
}
return {
draw: this.draw,
reset: this.reset
};
})();
/**
* A vector for 2d space.
* @param {integer} x - Center x coordinate.
* @param {integer} y - Center y coordinate.
* @param {integer} dx - Change in x.
* @param {integer} dy - Change in y.
*/
function Vector(x, y, dx, dy) {
// position
this.x = x || 0;
this.y = y || 0;
// direction
this.dx = dx || 0;
this.dy = dy || 0;
}
/**
* Advance the vectors position by dx,dy
*/
Vector.prototype.advance = function() {
this.x += this.dx;
this.y += this.dy;
};
/**
* Get the minimum distance between two vectors
* @param {Vector} vec - The other vector
* @return {number} The minimum distance between the two vectors
*/
Vector.prototype.minDist = function(vec) {
var minDist = Infinity;
var max = Math.max( Math.abs(this.dx), Math.abs(this.dy),
Math.abs(vec.dx ), Math.abs(vec.dy ) );
var slice = 1 / max;
var x, y, distSquared;
// get the middle of each vector
var vec1 = {}, vec2 = {};
vec1.x = this.x + this.width/2;
vec1.y = this.y + this.height/2;
vec2.x = vec.x + vec.width/2;
vec2.y = vec.y + vec.height/2;
for (var percent = 0; percent < 1; percent += slice) {
x = (vec1.x + this.dx * percent) - (vec2.x + vec.dx * percent);
y = (vec1.y + this.dy * percent) - (vec2.y + vec.dy * percent);
distSquared = x * x + y * y;
minDist = Math.min(minDist, distSquared);
}
return Math.sqrt(minDist);
};
/**
* The player object
*/
var player = (function(player) {
// add properties directly to the player imported object
player.width = 60;
player.height = 96;
player.speed = 6;
// jumping
player.gravity = 1;
player.dy = 0;
player.jumpDy = -10;
player.isFalling = false;
player.isJumping = false;
// spritesheets
player.sheet = new SpriteSheet('imgs/normal_walk.png', player.width, player.height);
player.walkAnim = new Animation(player.sheet, 4, 0, 15);
player.jumpAnim = new Animation(player.sheet, 4, 15, 15);
player.fallAnim = new Animation(player.sheet, 4, 11, 11);
player.anim = player.walkAnim;
Vector.call(player, 0, 0, 0, player.dy);
var jumpCounter = 0; // how long the jump button can be pressed down
/**
* Update the player's position and animation
*/
player.update = function() {
// jump if not currently jumping or falling
if (KEY_STATUS.space && player.dy === 0 && !player.isJumping) {
player.isJumping = true;
player.dy = player.jumpDy;
jumpCounter = 12;
}
// jump higher if the space bar is continually pressed
if (KEY_STATUS.space && jumpCounter) {
player.dy = player.jumpDy;
}
jumpCounter = Math.max(jumpCounter-1, 0);
this.advance();
// add gravity
if (player.isFalling || player.isJumping) {
player.dy += player.gravity;
}
// change animation if falling
if (player.dy > 0) {
player.anim = player.fallAnim;
}
// change animation if jumping
else if (player.dy < 0) {
player.anim = player.jumpAnim;
}
else {
player.anim = player.walkAnim;
}
player.anim.update();
};
/**
* Draw the player at its current position
*/
player.draw = function() {
player.anim.draw(player.x, player.y);
};
/**
* Reset the player's position
*/
player.reset = function() {
player.x = 64;
player.y = 250;
};
return player;
})(Object.create(Vector.prototype));
/**
* Sprites are anything drawn to the screen (ground, enemies, etc.)
* @param {integer} x - Starting x position of the sprite
* @param {integer} y - Starting y position of the sprite
* @param {string} type - Type of sprite
*/
function Sprite(x, y, type) {
this.x = x;
this.y = y;
this.width = platformWidth;
this.height = platformWidth;
this.type = type;
Vector.call(this, x, y, 0, 0);
/**
* Update the Sprite's position by the player's speed
*/
this.update = function() {
this.dx = -player.speed;
this.advance();
};
/**
* Draw the sprite at its current position
*/
this.draw = function() {
ctx.save();
ctx.translate(0.5,0.5);
ctx.drawImage(assetLoader.imgs[this.type], this.x, this.y);
ctx.restore();
};
}
Sprite.prototype = Object.create(Vector.prototype);
/**
* Get the type of a platform based on platform height
* @return Type of platform
*/
function getType() {
var type;
switch (platformHeight) {
case 0:
case 1:
type = Math.random() > 0.5 ? 'grass1' : 'grass2';
break;
case 2:
type = 'grass';
break;
case 3:
type = 'bridge';
break;
case 4:
type = 'box';
break;
}
if (platformLength === 1 && platformHeight < 3 && rand(0, 3) === 0) {
type = 'cliff';
}
return type;
}
/**
* Update all ground positions and draw them. Also check for collision against the player.
*/
function updateGround() {
// animate ground
player.isFalling = true;
for (var i = 0; i < ground.length; i++) {
ground[i].update();
ground[i].draw();
// stop the player from falling when landing on a platform
var angle;
if (player.minDist(ground[i]) <= player.height/2 + platformWidth/2 &&
(angle = Math.atan2(player.y - ground[i].y, player.x - ground[i].x) * 180/Math.PI) > -130 &&
angle < -50) {
player.isJumping = false;
player.isFalling = false;
player.y = ground[i].y - player.height + 5;
player.dy = 0;
}
}
// remove ground that have gone off screen
if (ground[0] && ground[0].x < -platformWidth) {
ground.splice(0, 1);
}
}
/**
* Update all water positions and draw them.
*/
function updateWater() {
// animate water
for (var i = 0; i < water.length; i++) {
water[i].update();
water[i].draw();
}
// remove water that has gone off screen
if (water[0] && water[0].x < -platformWidth) {
var w = water.splice(0, 1)[0];
w.x = water[water.length-1].x + platformWidth;
water.push(w);
}
}
/**
* Update all environment positions and draw them.
*/
function updateEnvironment() {
// animate environment
for (var i = 0; i < environment.length; i++) {
environment[i].update();
environment[i].draw();
}
// remove environment that have gone off screen
if (environment[0] && environment[0].x < -platformWidth) {
environment.splice(0, 1);
}
}
/**
* Update all enemy positions and draw them. Also check for collision against the player.
*/
function | () {
// animate enemies
for (var i = 0; i < enemies.length; i++) {
enemies[i].update();
enemies[i].draw();
// player ran into enemy
if (player.minDist(enemies[i]) <= player.width - platformWidth/2) {
gameOver();
}
}
// remove enemies that have gone off screen
if (enemies[0] && enemies[0].x < -platformWidth) {
enemies.splice(0, 1);
}
}
/**
* Update the player's position and draw
*/
function updatePlayer() {
player.update();
player.draw();
// game over
if (player.y + player.height >= canvas.height) {
gameOver();
}
}
/**
* Spawn new sprites off screen
*/
function spawnSprites() {
// increase score
score++;
// first create a gap
if (gapLength > 0) {
gapLength--;
}
// then create ground
else if (platformLength > 0) {
var type = getType();
ground.push(new Sprite(
canvas.width + platformWidth % player.speed,
platformBase - platformHeight * platformSpacer,
type
));
platformLength--;
// add random environment sprites
spawnEnvironmentSprites();
// add random enemies
spawnEnemySprites();
}
// start over
else {
// increase gap length every speed increase of 4
gapLength = rand(player.speed - 2, player.speed);
// only allow a ground to increase by 1
platformHeight = bound(rand(0, platformHeight + rand(0, 2)), 0, 4);
platformLength = rand(Math.floor(player.speed/2), player.speed * 4);
}
}
/**
* Spawn new environment sprites off screen
*/
function spawnEnvironmentSprites() {
if (score > 40 && rand(0, 20) === 0 && platformHeight < 3) {
if (Math.random() > 0.5) {
environment.push(new Sprite(
canvas.width + platformWidth % player.speed,
platformBase - platformHeight * platformSpacer - platformWidth,
'plant'
));
}
else if (platformLength > 2) {
environment.push(new Sprite(
canvas.width + platformWidth % player.speed,
platformBase - platformHeight * platformSpacer - platformWidth,
'bush1'
));
environment.push(new Sprite(
canvas.width + platformWidth % player.speed + platformWidth,
platformBase - platformHeight * platformSpacer - platformWidth,
'bush2'
));
}
}
}
/**
* Spawn new enemy sprites off screen
*/
function spawnEnemySprites() {
if (score > 100 && Math.random() > 0.96 && enemies.length < 3 && platformLength > 5 &&
(enemies.length ? canvas.width - enemies[enemies.length-1].x >= platformWidth * 3 ||
canvas.width - enemies[enemies.length-1].x < platformWidth : true)) {
enemies.push(new Sprite(
canvas.width + platformWidth % player.speed,
platformBase - platformHeight * platformSpacer - platformWidth,
Math.random() > 0.5 ? 'spikes' : 'slime'
));
}
}
/**
* Game loop
*/
function animate() {
if (!stop) {
requestAnimFrame( animate );
ctx.clearRect(0, 0, canvas.width, canvas.height);
background.draw();
// update entities
updateWater();
updateEnvironment();
updatePlayer();
updateGround();
updateEnemies();
// draw the score
ctx.fillText('score: ' + score + 'm', canvas.width - 140, 30);
// spawn a new Sprite
if (ticker % Math.floor(platformWidth / player.speed) === 0) {
spawnSprites();
}
// increase player speed only when player is jumping
if (ticker > (Math.floor(platformWidth / player.speed) * player.speed * 20) && player.dy !== 0) {
player.speed = bound(++player.speed, 0, 15);
player.walkAnim.frameSpeed = Math.floor(platformWidth / player.speed) - 1;
// reset ticker
ticker = 0;
// spawn a platform to fill in gap created by increasing player speed
if (gapLength === 0) {
var type = getType();
ground.push(new Sprite(
canvas.width + platformWidth % player.speed,
platformBase - platformHeight * platformSpacer,
type
));
platformLength--;
}
}
ticker++;
}
}
/**
* Keep track of the spacebar events
*/
var KEY_CODES = {
32: 'space'
};
var KEY_STATUS = {};
for (var code in KEY_CODES) {
if (KEY_CODES.hasOwnProperty(code)) {
KEY_STATUS[KEY_CODES[code]] = false;
}
}
document.onkeydown = function(e) {
var keyCode = (e.keyCode) ? e.keyCode : e.charCode;
if (KEY_CODES[keyCode]) {
e.preventDefault();
KEY_STATUS[KEY_CODES[keyCode]] = true;
}
};
document.onkeyup = function(e) {
var keyCode = (e.keyCode) ? e.keyCode : e.charCode;
if (KEY_CODES[keyCode]) {
e.preventDefault();
KEY_STATUS[KEY_CODES[keyCode]] = false;
}
};
/**
* Request Animation Polyfill
*/
var requestAnimFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(callback, element){
window.setTimeout(callback, 1000 / 60);
};
})();
/**
* Start the game - reset all variables and entities, spawn ground and water.
*/
function startGame() {
document.getElementById('game-over').style.display = 'none';
ground = [];
water = [];
environment = [];
enemies = [];
player.reset();
ticker = 0;
stop = false;
score = 0;
platformHeight = 2;
platformLength = 15;
gapLength = 0;
ctx.font = '16px arial, sans-serif';
for (var i = 0; i < 30; i++) {
ground.push(new Sprite(i * (platformWidth-3), platformBase - platformHeight * platformSpacer, 'grass'));
}
for (i = 0; i < canvas.width / 32 + 2; i++) {
water.push(new Sprite(i * platformWidth, platformBase, 'water'));
}
background.reset();
animate();
}
/**
* End the game and restart
*/
function gameOver() {
stop = true;
document.getElementById('game-over').style.display = 'block';
}
document.getElementById('restart').addEventListener('click', startGame);
assetLoader.downloadAll();
})(); | updateEnemies |
01_prepare_and_save_models.py | import os
import pickle
import pandas as pd
from sklearn.cluster import DBSCAN
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Sequential
from scripts.settings import *
from scripts.utils import create_dir
def prepare_for_learning(file_path, model_path, n_samples=5000, use_neutral=False):
# load data
rev_vec = pd.read_pickle(file_path)
# remove neutral if not used
if not use_neutral:
rev_vec = rev_vec[rev_vec['Information'] != 'neu']
# use only part of available data
rev_vec = rev_vec.sample(n_samples)
# save indices of training and validation set
pickle.dump(rev_vec.index, open(learning_index_path, 'wb'))
X, y = rev_vec[[col for col in rev_vec.columns if col.startswith('Vec')]], rev_vec['Information']
le = LabelEncoder()
le.fit(y.values)  # LabelEncoder expects a 1-D array of labels
create_dir(os.path.dirname(model_path))
pickle.dump(le, open(model_path, 'wb'))
return rev_vec, X, y
def classification_report_to_excel(y_test, y_pred, excel_path):
cr = classification_report(y_test, y_pred, output_dict=True)
create_dir(os.path.dirname(excel_path))
pd.DataFrame(cr).T.to_excel(excel_path)
def neural_network():
model = Sequential()
model.add(Dense(256, input_dim=1024, activation='relu', use_bias=True,
kernel_initializer='random_normal'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu', use_bias=True, kernel_initializer='random_normal'))
model.add(Dropout(0.5))
model.add(Dense(16, activation='relu', use_bias=True, kernel_initializer='random_normal'))
model.add(Dense(1, activation='sigmoid', use_bias=True, kernel_initializer='random_normal'))
model.compile(loss='binary_crossentropy', optimizer='adadelta', metrics=['acc'])
return model
def fit_and_save_model(X_train, y_train, model, model_path, network=False):
# create directory for model
create_dir(os.path.dirname(model_path)) | model.fit(X_train, y_train, epochs=150, batch_size=512, validation_split=0.2, callbacks=[checkpoint])
else:
model.fit(X_train, y_train)
pickle.dump(model, open(model_path, 'wb'))
def main():
rev_vec, X, y = prepare_for_learning(rev_path,
os.path.join(model_dir, label_encoder_file),
n_samples=5000,
use_neutral=False)
le_path = os.path.join(model_dir, label_encoder_file)
le = pickle.load(open(le_path, 'rb'))
y = le.transform(y)
# learn random forest
rf = RandomForestClassifier(n_estimators=100, max_depth=5,
min_samples_leaf=2,
class_weight='balanced', criterion='entropy')
fit_and_save_model(X, y, rf, os.path.join(model_dir, random_forest_file), network=False)
# use DBSCAN to find negative samples
dbs = DBSCAN(eps=0.01, min_samples=2)
pickle.dump(dbs, open(os.path.join(model_dir, dbscan_file), 'wb'))
# use neural network
network = neural_network()
fit_and_save_model(X, y, network, os.path.join(model_dir, network_file), network=True)
if __name__ == '__main__':
main() |
if network:
checkpoint = ModelCheckpoint(model_path, monitor='val_acc', verbose=1, save_best_only=True) |
MondrianApp.rs | MondrianApp | MondrianApp$ConnectionData |
|
testing.go | package vault
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"crypto/tls"
"crypto/x509"
"encoding/pem"
"fmt"
"net"
"net/http"
"os/exec"
"testing"
"time"
log "github.com/mgutz/logxi/v1"
"golang.org/x/crypto/ssh"
"github.com/hashicorp/go-uuid"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/helper/logformat"
"github.com/hashicorp/vault/helper/salt"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
"github.com/hashicorp/vault/physical"
)
// This file contains a number of methods that are useful for unit
// tests within other packages.
const (
testSharedPublicKey = `
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC9i+hFxZHGo6KblVme4zrAcJstR6I0PTJozW286X4WyvPnkMYDQ5mnhEYC7UWCvjoTWbPEXPX7NjhRtwQTGD67bV+lrxgfyzK1JZbUXK4PwgKJvQD+XyyWYMzDgGSQY61KUSqCxymSm/9NZkPU3ElaQ9xQuTzPpztM4ROfb8f2Yv6/ZESZsTo0MTAkp8Pcy+WkioI/uJ1H7zqs0EA4OMY4aDJRu0UtP4rTVeYNEAuRXdX+eH4aW3KMvhzpFTjMbaJHJXlEeUm2SaX5TNQyTOvghCeQILfYIL/Ca2ij8iwCmulwdV6eQGfd4VDu40PvSnmfoaE38o6HaPnX0kUcnKiT
`
testSharedPrivateKey = `
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAvYvoRcWRxqOim5VZnuM6wHCbLUeiND0yaM1tvOl+Fsrz55DG
A0OZp4RGAu1Fgr46E1mzxFz1+zY4UbcEExg+u21fpa8YH8sytSWW1FyuD8ICib0A
/l8slmDMw4BkkGOtSlEqgscpkpv/TWZD1NxJWkPcULk8z6c7TOETn2/H9mL+v2RE
mbE6NDEwJKfD3MvlpIqCP7idR+86rNBAODjGOGgyUbtFLT+K01XmDRALkV3V/nh+
GltyjL4c6RU4zG2iRyV5RHlJtkml+UzUMkzr4IQnkCC32CC/wmtoo/IsAprpcHVe
nkBn3eFQ7uND70p5n6GhN/KOh2j519JFHJyokwIDAQABAoIBAHX7VOvBC3kCN9/x
+aPdup84OE7Z7MvpX6w+WlUhXVugnmsAAVDczhKoUc/WktLLx2huCGhsmKvyVuH+
MioUiE+vx75gm3qGx5xbtmOfALVMRLopjCnJYf6EaFA0ZeQ+NwowNW7Lu0PHmAU8
Z3JiX8IwxTz14DU82buDyewO7v+cEr97AnERe3PUcSTDoUXNaoNxjNpEJkKREY6h
4hAY676RT/GsRcQ8tqe/rnCqPHNd7JGqL+207FK4tJw7daoBjQyijWuB7K5chSal
oPInylM6b13ASXuOAOT/2uSUBWmFVCZPDCmnZxy2SdnJGbsJAMl7Ma3MUlaGvVI+
Tfh1aQkCgYEA4JlNOabTb3z42wz6mz+Nz3JRwbawD+PJXOk5JsSnV7DtPtfgkK9y
6FTQdhnozGWShAvJvc+C4QAihs9AlHXoaBY5bEU7R/8UK/pSqwzam+MmxmhVDV7G
IMQPV0FteoXTaJSikhZ88mETTegI2mik+zleBpVxvfdhE5TR+lq8Br0CgYEA2AwJ
CUD5CYUSj09PluR0HHqamWOrJkKPFPwa+5eiTTCzfBBxImYZh7nXnWuoviXC0sg2
AuvCW+uZ48ygv/D8gcz3j1JfbErKZJuV+TotK9rRtNIF5Ub7qysP7UjyI7zCssVM
kuDd9LfRXaB/qGAHNkcDA8NxmHW3gpln4CFdSY8CgYANs4xwfercHEWaJ1qKagAe
rZyrMpffAEhicJ/Z65lB0jtG4CiE6w8ZeUMWUVJQVcnwYD+4YpZbX4S7sJ0B8Ydy
AhkSr86D/92dKTIt2STk6aCN7gNyQ1vW198PtaAWH1/cO2UHgHOy3ZUt5X/Uwxl9
cex4flln+1Viumts2GgsCQKBgCJH7psgSyPekK5auFdKEr5+Gc/jB8I/Z3K9+g4X
5nH3G1PBTCJYLw7hRzw8W/8oALzvddqKzEFHphiGXK94Lqjt/A4q1OdbCrhiE68D
My21P/dAKB1UYRSs9Y8CNyHCjuZM9jSMJ8vv6vG/SOJPsnVDWVAckAbQDvlTHC9t
O98zAoGAcbW6uFDkrv0XMCpB9Su3KaNXOR0wzag+WIFQRXCcoTvxVi9iYfUReQPi
oOyBJU/HMVvBfv4g+OVFLVgSwwm6owwsouZ0+D/LasbuHqYyqYqdyPJQYzWA2Y+F
+B6f4RoPdSXj24JHPg/ioRxjaj094UXJxua2yfkcecGNEuBQHSs=
-----END RSA PRIVATE KEY-----
`
)
// TestCore returns a pure in-memory, uninitialized core for testing.
func TestCore(t *testing.T) *Core {
return TestCoreWithSeal(t, nil)
}
// TestCoreWithSeal returns a pure in-memory, uninitialized core with the
// specified seal for testing.
func TestCoreWithSeal(t *testing.T, testSeal Seal) *Core {
noopAudits := map[string]audit.Factory{
"noop": func(config *audit.BackendConfig) (audit.Backend, error) {
view := &logical.InmemStorage{}
view.Put(&logical.StorageEntry{
Key: "salt",
Value: []byte("foo"),
})
var err error
config.Salt, err = salt.NewSalt(view, &salt.Config{
HMAC: sha256.New,
HMACType: "hmac-sha256",
})
if err != nil {
t.Fatalf("error getting new salt: %v", err)
}
return &noopAudit{
Config: config,
}, nil
},
}
noopBackends := make(map[string]logical.Factory)
noopBackends["noop"] = func(config *logical.BackendConfig) (logical.Backend, error) {
b := new(framework.Backend)
b.Setup(config)
return b, nil
}
noopBackends["http"] = func(config *logical.BackendConfig) (logical.Backend, error) {
return new(rawHTTP), nil | }
logicalBackends["generic"] = LeasedPassthroughBackendFactory
for backendName, backendFactory := range testLogicalBackends {
logicalBackends[backendName] = backendFactory
}
logger := logformat.NewVaultLogger(log.LevelTrace)
physicalBackend := physical.NewInmem(logger)
conf := &CoreConfig{
Physical: physicalBackend,
AuditBackends: noopAudits,
LogicalBackends: logicalBackends,
CredentialBackends: noopBackends,
DisableMlock: true,
Logger: logger,
}
if testSeal != nil {
conf.Seal = testSeal
}
c, err := NewCore(conf)
if err != nil {
t.Fatalf("err: %s", err)
}
return c
}
// TestCoreInit initializes the core with a single key, and returns
// the key that must be used to unseal the core and a root token.
func TestCoreInit(t *testing.T, core *Core) ([]byte, string) {
return TestCoreInitClusterWrapperSetup(t, core, nil, func() (http.Handler, http.Handler) { return nil, nil })
}
func TestCoreInitClusterWrapperSetup(t *testing.T, core *Core, clusterAddrs []*net.TCPAddr, handlerSetupFunc func() (http.Handler, http.Handler)) ([]byte, string) {
core.SetClusterListenerAddrs(clusterAddrs)
core.SetClusterSetupFuncs(handlerSetupFunc)
result, err := core.Initialize(&InitParams{
BarrierConfig: &SealConfig{
SecretShares: 1,
SecretThreshold: 1,
},
RecoveryConfig: nil,
})
if err != nil {
t.Fatalf("err: %s", err)
}
return result.SecretShares[0], result.RootToken
}
func TestCoreUnseal(core *Core, key []byte) (bool, error) {
core.SetClusterSetupFuncs(func() (http.Handler, http.Handler) { return nil, nil })
return core.Unseal(key)
}
// TestCoreUnsealed returns a pure in-memory core that is already
// initialized and unsealed.
func TestCoreUnsealed(t *testing.T) (*Core, []byte, string) {
core := TestCore(t)
key, token := TestCoreInit(t, core)
if _, err := TestCoreUnseal(core, TestKeyCopy(key)); err != nil {
t.Fatalf("unseal err: %s", err)
}
sealed, err := core.Sealed()
if err != nil {
t.Fatalf("err checking seal status: %s", err)
}
if sealed {
t.Fatal("should not be sealed")
}
return core, key, token
}
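// Usage sketch (illustrative): a typical test grabs an unsealed core and
// issues requests against it with the root token, e.g.:
//
//	func TestSomething(t *testing.T) {
//		core, _, root := TestCoreUnsealed(t)
//		req := logical.TestRequest(t, logical.ReadOperation, "sys/mounts")
//		req.ClientToken = root
//		if _, err := core.HandleRequest(req); err != nil {
//			t.Fatalf("err: %v", err)
//		}
//	}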
// TestCoreWithTokenStore returns an in-memory core that has a token store
// mounted, so that logical token functions can be used
func TestCoreWithTokenStore(t *testing.T) (*Core, *TokenStore, []byte, string) {
c, key, root := TestCoreUnsealed(t)
me := &MountEntry{
Table: credentialTableType,
Path: "token/",
Type: "token",
Description: "token based credentials",
}
meUUID, err := uuid.GenerateUUID()
if err != nil {
t.Fatal(err)
}
me.UUID = meUUID
view := NewBarrierView(c.barrier, credentialBarrierPrefix+me.UUID+"/")
tokenstore, _ := c.newCredentialBackend("token", c.mountEntrySysView(me), view, nil)
ts := tokenstore.(*TokenStore)
router := NewRouter()
router.Mount(ts, "auth/token/", &MountEntry{Table: credentialTableType, UUID: ""}, ts.view)
subview := c.systemBarrierView.SubView(expirationSubPath)
logger := logformat.NewVaultLogger(log.LevelTrace)
exp := NewExpirationManager(router, subview, ts, logger)
ts.SetExpirationManager(exp)
return c, ts, key, root
}
// TestKeyCopy is a silly little function to just copy the key so that
// it can be used with Unseal easily.
func TestKeyCopy(key []byte) []byte {
result := make([]byte, len(key))
copy(result, key)
return result
}
var testLogicalBackends = map[string]logical.Factory{}
// Starts the test server which responds to SSH authentication.
// Used to test the SSH secret backend.
func StartSSHHostTestServer() (string, error) {
pubKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(testSharedPublicKey))
if err != nil {
return "", fmt.Errorf("Error parsing public key")
}
serverConfig := &ssh.ServerConfig{
PublicKeyCallback: func(conn ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
if bytes.Compare(pubKey.Marshal(), key.Marshal()) == 0 {
return &ssh.Permissions{}, nil
} else {
return nil, fmt.Errorf("Key does not match")
}
},
}
signer, err := ssh.ParsePrivateKey([]byte(testSharedPrivateKey))
if err != nil {
panic("Error parsing private key")
}
serverConfig.AddHostKey(signer)
soc, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
return "", fmt.Errorf("Error listening to connection")
}
go func() {
for {
conn, err := soc.Accept()
if err != nil {
panic(fmt.Sprintf("Error accepting incoming connection: %s", err))
}
defer conn.Close()
sshConn, chanReqs, _, err := ssh.NewServerConn(conn, serverConfig)
if err != nil {
panic(fmt.Sprintf("Handshaking error: %v", err))
}
go func() {
for chanReq := range chanReqs {
go func(chanReq ssh.NewChannel) {
if chanReq.ChannelType() != "session" {
chanReq.Reject(ssh.UnknownChannelType, "unknown channel type")
return
}
ch, requests, err := chanReq.Accept()
if err != nil {
panic(fmt.Sprintf("Error accepting channel: %s", err))
}
go func(ch ssh.Channel, in <-chan *ssh.Request) {
for req := range in {
executeServerCommand(ch, req)
}
}(ch, requests)
}(chanReq)
}
sshConn.Close()
}()
}
}()
return soc.Addr().String(), nil
}
// This executes the commands requested to be run on the server.
// Used to test the SSH secret backend.
func executeServerCommand(ch ssh.Channel, req *ssh.Request) {
command := string(req.Payload[4:])
cmd := exec.Command("/bin/bash", []string{"-c", command}...)
req.Reply(true, nil)
cmd.Stdout = ch
cmd.Stderr = ch
cmd.Stdin = ch
err := cmd.Start()
if err != nil {
panic(fmt.Sprintf("Error starting the command: '%s'", err))
}
go func() {
_, err := cmd.Process.Wait()
if err != nil {
panic(fmt.Sprintf("Error while waiting for command to finish:'%s'", err))
}
ch.Close()
}()
}
// This adds a logical backend for the test core. This needs to be
// invoked before the test core is created.
func AddTestLogicalBackend(name string, factory logical.Factory) error {
if name == "" {
return fmt.Errorf("Missing backend name")
}
if factory == nil {
return fmt.Errorf("Missing backend factory function")
}
testLogicalBackends[name] = factory
return nil
}
type noopAudit struct {
Config *audit.BackendConfig
}
func (n *noopAudit) GetHash(data string) string {
return n.Config.Salt.GetIdentifiedHMAC(data)
}
func (n *noopAudit) LogRequest(a *logical.Auth, r *logical.Request, e error) error {
return nil
}
func (n *noopAudit) LogResponse(a *logical.Auth, r *logical.Request, re *logical.Response, err error) error {
return nil
}
func (n *noopAudit) Reload() error {
return nil
}
type rawHTTP struct{}
func (n *rawHTTP) HandleRequest(req *logical.Request) (*logical.Response, error) {
return &logical.Response{
Data: map[string]interface{}{
logical.HTTPStatusCode: 200,
logical.HTTPContentType: "plain/text",
logical.HTTPRawBody: []byte("hello world"),
},
}, nil
}
func (n *rawHTTP) HandleExistenceCheck(req *logical.Request) (bool, bool, error) {
return false, false, nil
}
func (n *rawHTTP) SpecialPaths() *logical.Paths {
return &logical.Paths{Unauthenticated: []string{"*"}}
}
func (n *rawHTTP) System() logical.SystemView {
return logical.StaticSystemView{
DefaultLeaseTTLVal: time.Hour * 24,
MaxLeaseTTLVal: time.Hour * 24 * 32,
}
}
func (n *rawHTTP) Cleanup() {
// noop
}
func GenerateRandBytes(length int) ([]byte, error) {
if length < 0 {
return nil, fmt.Errorf("length must be >= 0")
}
buf := make([]byte, length)
if length == 0 {
return buf, nil
}
n, err := rand.Read(buf)
if err != nil {
return nil, err
}
if n != length {
return nil, fmt.Errorf("unable to read %d bytes; only read %d", length, n)
}
return buf, nil
}
func TestWaitActive(t *testing.T, core *Core) {
start := time.Now()
var standby bool
var err error
for time.Now().Sub(start) < time.Second {
standby, err = core.Standby()
if err != nil {
t.Fatalf("err: %v", err)
}
if !standby {
break
}
}
if standby {
t.Fatalf("should not be in standby mode")
}
}
type TestListener struct {
net.Listener
Address *net.TCPAddr
}
type TestClusterCore struct {
*Core
Listeners []*TestListener
Root string
Key []byte
CACertBytes []byte
CACert *x509.Certificate
TLSConfig *tls.Config
}
func (t *TestClusterCore) CloseListeners() {
if t.Listeners != nil {
for _, ln := range t.Listeners {
ln.Close()
}
}
// Give time to actually shut down/clean up before the next test
time.Sleep(time.Second)
}
func TestCluster(t *testing.T, handlers []http.Handler, base *CoreConfig, unsealStandbys bool) []*TestClusterCore {
if handlers == nil || len(handlers) != 3 {
t.Fatal("handlers must be size 3")
}
//
// TLS setup
//
block, _ := pem.Decode([]byte(TestClusterCACert))
if block == nil {
t.Fatal("error decoding cluster CA cert")
}
caBytes := block.Bytes
caCert, err := x509.ParseCertificate(caBytes)
if err != nil {
t.Fatal(err)
}
serverCert, err := tls.X509KeyPair([]byte(TestClusterServerCert), []byte(TestClusterServerKey))
if err != nil {
t.Fatal(err)
}
rootCAs := x509.NewCertPool()
rootCAs.AppendCertsFromPEM([]byte(TestClusterCACert))
tlsConfig := &tls.Config{
Certificates: []tls.Certificate{serverCert},
RootCAs: rootCAs,
ClientCAs: rootCAs,
ClientAuth: tls.RequireAndVerifyClientCert,
}
tlsConfig.BuildNameToCertificate()
// Sanity checking
block, _ = pem.Decode([]byte(TestClusterServerCert))
if block == nil {
t.Fatal(err)
}
parsedServerCert, err := x509.ParseCertificate(block.Bytes)
if err != nil {
t.Fatal(err)
}
chains, err := parsedServerCert.Verify(x509.VerifyOptions{
DNSName: "127.0.0.1",
Roots: rootCAs,
KeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
})
if err != nil {
t.Fatal(err)
}
if chains == nil || len(chains) == 0 {
t.Fatal("no verified chains for server auth")
}
chains, err = parsedServerCert.Verify(x509.VerifyOptions{
DNSName: "127.0.0.1",
Roots: rootCAs,
KeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth},
})
if err != nil {
t.Fatal(err)
}
if chains == nil || len(chains) == 0 {
t.Fatal("no verified chains for chains auth")
}
logger := logformat.NewVaultLogger(log.LevelTrace)
//
// Listener setup
//
ln, err := net.ListenTCP("tcp", &net.TCPAddr{
IP: net.ParseIP("127.0.0.1"),
Port: 0,
})
if err != nil {
t.Fatal(err)
}
c1lns := []*TestListener{&TestListener{
Listener: tls.NewListener(ln, tlsConfig),
Address: ln.Addr().(*net.TCPAddr),
},
}
ln, err = net.ListenTCP("tcp", &net.TCPAddr{
IP: net.ParseIP("127.0.0.1"),
Port: 0,
})
if err != nil {
t.Fatal(err)
}
c1lns = append(c1lns, &TestListener{
Listener: tls.NewListener(ln, tlsConfig),
Address: ln.Addr().(*net.TCPAddr),
})
server1 := &http.Server{
Handler: handlers[0],
}
for _, ln := range c1lns {
go server1.Serve(ln)
}
ln, err = net.ListenTCP("tcp", &net.TCPAddr{
IP: net.ParseIP("127.0.0.1"),
Port: 0,
})
if err != nil {
t.Fatal(err)
}
c2lns := []*TestListener{&TestListener{
Listener: tls.NewListener(ln, tlsConfig),
Address: ln.Addr().(*net.TCPAddr),
},
}
server2 := &http.Server{
Handler: handlers[1],
}
for _, ln := range c2lns {
go server2.Serve(ln)
}
ln, err = net.ListenTCP("tcp", &net.TCPAddr{
IP: net.ParseIP("127.0.0.1"),
Port: 0,
})
if err != nil {
t.Fatal(err)
}
c3lns := []*TestListener{&TestListener{
Listener: tls.NewListener(ln, tlsConfig),
Address: ln.Addr().(*net.TCPAddr),
},
}
server3 := &http.Server{
Handler: handlers[2],
}
for _, ln := range c3lns {
go server3.Serve(ln)
}
// Create three cores with the same physical and different redirect/cluster addrs
coreConfig := &CoreConfig{
Physical: physical.NewInmem(logger),
HAPhysical: physical.NewInmemHA(logger),
LogicalBackends: make(map[string]logical.Factory),
CredentialBackends: make(map[string]logical.Factory),
AuditBackends: make(map[string]audit.Factory),
RedirectAddr: fmt.Sprintf("https://127.0.0.1:%d", c1lns[0].Address.Port),
ClusterAddr: fmt.Sprintf("https://127.0.0.1:%d", c1lns[0].Address.Port+1),
DisableMlock: true,
}
if base != nil {
// Used to set something non-working to test fallback
switch base.ClusterAddr {
case "empty":
coreConfig.ClusterAddr = ""
case "":
default:
coreConfig.ClusterAddr = base.ClusterAddr
}
if base.LogicalBackends != nil {
for k, v := range base.LogicalBackends {
coreConfig.LogicalBackends[k] = v
}
}
if base.CredentialBackends != nil {
for k, v := range base.CredentialBackends {
coreConfig.CredentialBackends[k] = v
}
}
if base.AuditBackends != nil {
for k, v := range base.AuditBackends {
coreConfig.AuditBackends[k] = v
}
}
}
c1, err := NewCore(coreConfig)
if err != nil {
t.Fatalf("err: %v", err)
}
coreConfig.RedirectAddr = fmt.Sprintf("https://127.0.0.1:%d", c2lns[0].Address.Port)
if coreConfig.ClusterAddr != "" {
coreConfig.ClusterAddr = fmt.Sprintf("https://127.0.0.1:%d", c2lns[0].Address.Port+1)
}
c2, err := NewCore(coreConfig)
if err != nil {
t.Fatalf("err: %v", err)
}
coreConfig.RedirectAddr = fmt.Sprintf("https://127.0.0.1:%d", c3lns[0].Address.Port)
if coreConfig.ClusterAddr != "" {
coreConfig.ClusterAddr = fmt.Sprintf("https://127.0.0.1:%d", c3lns[0].Address.Port+1)
}
c3, err := NewCore(coreConfig)
if err != nil {
t.Fatalf("err: %v", err)
}
//
// Clustering setup
//
clusterAddrGen := func(lns []*TestListener) []*net.TCPAddr {
ret := make([]*net.TCPAddr, len(lns))
for i, ln := range lns {
ret[i] = &net.TCPAddr{
IP: ln.Address.IP,
Port: ln.Address.Port + 1,
}
}
return ret
}
c2.SetClusterListenerAddrs(clusterAddrGen(c2lns))
c2.SetClusterSetupFuncs(WrapHandlerForClustering(handlers[1], logger))
c3.SetClusterListenerAddrs(clusterAddrGen(c3lns))
c3.SetClusterSetupFuncs(WrapHandlerForClustering(handlers[2], logger))
key, root := TestCoreInitClusterWrapperSetup(t, c1, clusterAddrGen(c1lns), WrapHandlerForClustering(handlers[0], logger))
if _, err := c1.Unseal(TestKeyCopy(key)); err != nil {
t.Fatalf("unseal err: %s", err)
}
// Verify unsealed
sealed, err := c1.Sealed()
if err != nil {
t.Fatalf("err checking seal status: %s", err)
}
if sealed {
t.Fatal("should not be sealed")
}
TestWaitActive(t, c1)
if unsealStandbys {
if _, err := c2.Unseal(TestKeyCopy(key)); err != nil {
t.Fatalf("unseal err: %s", err)
}
if _, err := c3.Unseal(TestKeyCopy(key)); err != nil {
t.Fatalf("unseal err: %s", err)
}
// Let them come fully up to standby
time.Sleep(2 * time.Second)
// Ensure cluster connection info is populated
isLeader, _, err := c2.Leader()
if err != nil {
t.Fatal(err)
}
if isLeader {
t.Fatal("c2 should not be leader")
}
isLeader, _, err = c3.Leader()
if err != nil {
t.Fatal(err)
}
if isLeader {
t.Fatal("c3 should not be leader")
}
}
return []*TestClusterCore{
&TestClusterCore{
Core: c1,
Listeners: c1lns,
Root: root,
Key: TestKeyCopy(key),
CACertBytes: caBytes,
CACert: caCert,
TLSConfig: tlsConfig,
},
&TestClusterCore{
Core: c2,
Listeners: c2lns,
Root: root,
Key: TestKeyCopy(key),
CACertBytes: caBytes,
CACert: caCert,
TLSConfig: tlsConfig,
},
&TestClusterCore{
Core: c3,
Listeners: c3lns,
Root: root,
Key: TestKeyCopy(key),
CACertBytes: caBytes,
CACert: caCert,
TLSConfig: tlsConfig,
},
}
}
const (
TestClusterCACert = `-----BEGIN CERTIFICATE-----
MIIDPjCCAiagAwIBAgIUfIKsF2VPT7sdFcKOHJH2Ii6K4MwwDQYJKoZIhvcNAQEL
BQAwFjEUMBIGA1UEAxMLbXl2YXVsdC5jb20wIBcNMTYwNTAyMTYwNTQyWhgPMjA2
NjA0MjAxNjA2MTJaMBYxFDASBgNVBAMTC215dmF1bHQuY29tMIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuOimEXawD2qBoLCFP3Skq5zi1XzzcMAJlfdS
xz9hfymuJb+cN8rB91HOdU9wQCwVKnkUtGWxUnMp0tT0uAZj5NzhNfyinf0JGAbP
67HDzVZhGBHlHTjPX0638yaiUx90cTnucX0N20SgCYct29dMSgcPl+W78D3Jw3xE
JsHQPYS9ASe2eONxG09F/qNw7w/RO5/6WYoV2EmdarMMxq52pPe2chtNMQdSyOUb
cCcIZyk4QVFZ1ZLl6jTnUPb+JoCx1uMxXvMek4NF/5IL0Wr9dw2gKXKVKoHDr6SY
WrCONRw61A5Zwx1V+kn73YX3USRlkufQv/ih6/xThYDAXDC9cwIDAQABo4GBMH8w
DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOuKvPiU
G06iHkRXAOeMiUdBfHFyMB8GA1UdIwQYMBaAFOuKvPiUG06iHkRXAOeMiUdBfHFy
MBwGA1UdEQQVMBOCC215dmF1bHQuY29thwR/AAABMA0GCSqGSIb3DQEBCwUAA4IB
AQBcN/UdAMzc7UjRdnIpZvO+5keBGhL/vjltnGM1dMWYHa60Y5oh7UIXF+P1RdNW
n7g80lOyvkSR15/r1rDkqOK8/4oruXU31EcwGhDOC4hU6yMUy4ltV/nBoodHBXNh
MfKiXeOstH1vdI6G0P6W93Bcww6RyV1KH6sT2dbETCw+iq2VN9CrruGIWzd67UT/
spe/kYttr3UYVV3O9kqgffVVgVXg/JoRZ3J7Hy2UEXfh9UtWNanDlRuXaZgE9s/d
CpA30CHpNXvKeyNeW2ktv+2nAbSpvNW+e6MecBCTBIoDSkgU8ShbrzmDKVwNN66Q
5gn6KxUPBKHEtNzs5DgGM7nq
-----END CERTIFICATE-----`
TestClusterCAKey = `-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAuOimEXawD2qBoLCFP3Skq5zi1XzzcMAJlfdSxz9hfymuJb+c
N8rB91HOdU9wQCwVKnkUtGWxUnMp0tT0uAZj5NzhNfyinf0JGAbP67HDzVZhGBHl
HTjPX0638yaiUx90cTnucX0N20SgCYct29dMSgcPl+W78D3Jw3xEJsHQPYS9ASe2
eONxG09F/qNw7w/RO5/6WYoV2EmdarMMxq52pPe2chtNMQdSyOUbcCcIZyk4QVFZ
1ZLl6jTnUPb+JoCx1uMxXvMek4NF/5IL0Wr9dw2gKXKVKoHDr6SYWrCONRw61A5Z
wx1V+kn73YX3USRlkufQv/ih6/xThYDAXDC9cwIDAQABAoIBAG3bCo7ljMQb6tel
CAUjL5Ilqz5a9ebOsONABRYLOclq4ePbatxawdJF7/sSLwZxKkIJnZtvr2Hkubxg
eOO8KC0YbVS9u39Rjc2QfobxHfsojpbWSuCJl+pvwinbkiUAUxXR7S/PtCPJKat/
fGdYCiMQ/tqnynh4vR4+/d5o12c0KuuQ22/MdEf3GOadUamRXS1ET9iJWqla1pJW
TmzrlkGAEnR5PPO2RMxbnZCYmj3dArxWAnB57W+bWYla0DstkDKtwg2j2ikNZpXB
nkZJJpxR76IYD1GxfwftqAKxujKcyfqB0dIKCJ0UmfOkauNWjexroNLwaAOC3Nud
XIxppAECgYEA1wJ9EH6A6CrSjdzUocF9LtQy1LCDHbdiQFHxM5/zZqIxraJZ8Gzh
Q0d8JeOjwPdG4zL9pHcWS7+x64Wmfn0+Qfh6/47Vy3v90PIL0AeZYshrVZyJ/s6X
YkgFK80KEuWtacqIZ1K2UJyCw81u/ynIl2doRsIbgkbNeN0opjmqVTMCgYEA3CkW
2fETWK1LvmgKFjG1TjOotVRIOUfy4iN0kznPm6DK2PgTF5DX5RfktlmA8i8WPmB7
YFOEdAWHf+RtoM/URa7EAGZncCWe6uggAcWqznTS619BJ63OmncpSWov5Byg90gJ
48qIMY4wDjE85ypz1bmBc2Iph974dtWeDtB7dsECgYAyKZh4EquMfwEkq9LH8lZ8
aHF7gbr1YeWAUB3QB49H8KtacTg+iYh8o97pEBUSXh6hvzHB/y6qeYzPAB16AUpX
Jdu8Z9ylXsY2y2HKJRu6GjxAewcO9bAH8/mQ4INrKT6uIdx1Dq0OXZV8jR9KVLtB
55RCfeLhIBesDR0Auw9sVQKBgB0xTZhkgP43LF35Ca1btgDClNJGdLUztx8JOIH1
HnQyY/NVIaL0T8xO2MLdJ131pGts+68QI/YGbaslrOuv4yPCQrcS3RBfzKy1Ttkt
TrLFhtoy7T7HqyeMOWtEq0kCCs3/PWB5EIoRoomfOcYlOOrUCDg2ge9EP4nyVVz9
hAGBAoGBAJXw/ufevxpBJJMSyULmVWYr34GwLC1OhSE6AVVt9JkIYnc5L4xBKTHP
QNKKJLmFmMsEqfxHUNWmpiHkm2E0p37Zehui3kywo+A4ybHPTua70ZWQfZhKxLUr
PvJa8JmwiCM7kO8zjOv+edY1mMWrbjAZH1YUbfcTHmST7S8vp0F3
-----END RSA PRIVATE KEY-----`
TestClusterServerCert = `-----BEGIN CERTIFICATE-----
MIIDtzCCAp+gAwIBAgIUBLqh6ctGWVDUxFhxJX7m6S/bnrcwDQYJKoZIhvcNAQEL
BQAwFjEUMBIGA1UEAxMLbXl2YXVsdC5jb20wIBcNMTYwNTAyMTYwOTI2WhgPMjA2
NjA0MjAxNTA5NTZaMBsxGTAXBgNVBAMTEGNlcnQubXl2YXVsdC5jb20wggEiMA0G
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDY3gPB29kkdbu0mPO6J0efagQhSiXB
9OyDuLf5sMk6CVDWVWal5hISkyBmw/lXgF7qC2XFKivpJOrcGQd5Ep9otBqyJLzI
b0IWdXuPIrVnXDwcdWr86ybX2iC42zKWfbXgjzGijeAVpl0UJLKBj+fk5q6NvkRL
5FUL6TRV7Krn9mrmnrV9J5IqV15pTd9W2aVJ6IqWvIPCACtZKulqWn4707uy2X2W
1Stq/5qnp1pDshiGk1VPyxCwQ6yw3iEcgecbYo3vQfhWcv7Q8LpSIM9ZYpXu6OmF
+czqRZS9gERl+wipmmrN1MdYVrTuQem21C/PNZ4jo4XUk1SFx6JrcA+lAgMBAAGj
gfUwgfIwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMB0GA1UdDgQWBBSe
Cl9WV3BjGCwmS/KrDSLRjfwyqjAfBgNVHSMEGDAWgBTrirz4lBtOoh5EVwDnjIlH
QXxxcjA7BggrBgEFBQcBAQQvMC0wKwYIKwYBBQUHMAKGH2h0dHA6Ly8xMjcuMC4w
LjE6ODIwMC92MS9wa2kvY2EwIQYDVR0RBBowGIIQY2VydC5teXZhdWx0LmNvbYcE
fwAAATAxBgNVHR8EKjAoMCagJKAihiBodHRwOi8vMTI3LjAuMC4xOjgyMDAvdjEv
cGtpL2NybDANBgkqhkiG9w0BAQsFAAOCAQEAWGholPN8buDYwKbUiDavbzjsxUIX
lU4MxEqOHw7CD3qIYIauPboLvB9EldBQwhgOOy607Yvdg3rtyYwyBFwPhHo/hK3Z
6mn4hc6TF2V+AUdHBvGzp2dbYLeo8noVoWbQ/lBulggwlIHNNF6+a3kALqsqk1Ch
f/hzsjFnDhAlNcYFgG8TgfE2lE/FckvejPqBffo7Q3I+wVAw0buqiz5QL81NOT+D
Y2S9LLKLRaCsWo9wRU1Az4Rhd7vK5SEMh16jJ82GyEODWPvuxOTI1MnzfnbWyLYe
TTp6YBjGMVf1I6NEcWNur7U17uIOiQjMZ9krNvoMJ1A/cxCoZ98QHgcIPg==
-----END CERTIFICATE-----`
TestClusterServerKey = `-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEA2N4DwdvZJHW7tJjzuidHn2oEIUolwfTsg7i3+bDJOglQ1lVm
peYSEpMgZsP5V4Be6gtlxSor6STq3BkHeRKfaLQasiS8yG9CFnV7jyK1Z1w8HHVq
/Osm19oguNsyln214I8xoo3gFaZdFCSygY/n5Oaujb5ES+RVC+k0Veyq5/Zq5p61
fSeSKldeaU3fVtmlSeiKlryDwgArWSrpalp+O9O7stl9ltUrav+ap6daQ7IYhpNV
T8sQsEOssN4hHIHnG2KN70H4VnL+0PC6UiDPWWKV7ujphfnM6kWUvYBEZfsIqZpq
zdTHWFa07kHpttQvzzWeI6OF1JNUhceia3APpQIDAQABAoIBAQCH3vEzr+3nreug
RoPNCXcSJXXY9X+aeT0FeeGqClzIg7Wl03OwVOjVwl/2gqnhbIgK0oE8eiNwurR6
mSPZcxV0oAJpwiKU4T/imlCDaReGXn86xUX2l82KRxthNdQH/VLKEmzij0jpx4Vh
bWx5SBPdkbmjDKX1dmTiRYWIn/KjyNPvNvmtwdi8Qluhf4eJcNEUr2BtblnGOmfL
FdSu+brPJozpoQ1QdDnbAQRgqnh7Shl0tT85whQi0uquqIj1gEOGVjmBvDDnL3GV
WOENTKqsmIIoEzdZrql1pfmYTk7WNaD92bfpN128j8BF7RmAV4/DphH0pvK05y9m
tmRhyHGxAoGBAOV2BBocsm6xup575VqmFN+EnIOiTn+haOvfdnVsyQHnth63fOQx
PNtMpTPR1OMKGpJ13e2bV0IgcYRsRkScVkUtoa/17VIgqZXffnJJ0A/HT67uKBq3
8o7RrtyK5N20otw0lZHyqOPhyCdpSsurDhNON1kPVJVYY4N1RiIxfut/AoGBAPHz
HfsJ5ZkyELE9N/r4fce04lprxWH+mQGK0/PfjS9caXPhj/r5ZkVMvzWesF3mmnY8
goE5S35TuTvV1+6rKGizwlCFAQlyXJiFpOryNWpLwCmDDSzLcm+sToAlML3tMgWU
jM3dWHx3C93c3ft4rSWJaUYI9JbHsMzDW6Yh+GbbAoGBANIbKwxh5Hx5XwEJP2yu
kIROYCYkMy6otHLujgBdmPyWl+suZjxoXWoMl2SIqR8vPD+Jj6mmyNJy9J6lqf3f
DRuQ+fEuBZ1i7QWfvJ+XuN0JyovJ5Iz6jC58D1pAD+p2IX3y5FXcVQs8zVJRFjzB
p0TEJOf2oqORaKWRd6ONoMKvAoGALKu6aVMWdQZtVov6/fdLIcgf0pn7Q3CCR2qe
X3Ry2L+zKJYIw0mwvDLDSt8VqQCenB3n6nvtmFFU7ds5lvM67rnhsoQcAOaAehiS
rl4xxoJd5Ewx7odRhZTGmZpEOYzFo4odxRSM9c30/u18fqV1Mm0AZtHYds4/sk6P
aUj0V+kCgYBMpGrJk8RSez5g0XZ35HfpI4ENoWbiwB59FIpWsLl2LADEh29eC455
t9Muq7MprBVBHQo11TMLLFxDIjkuMho/gcKgpYXCt0LfiNm8EZehvLJUXH+3WqUx
we6ywrbFCs6LaxaOCtTiLsN+GbZCatITL0UJaeBmTAbiw0KQjUuZPQ==
-----END RSA PRIVATE KEY-----`
) | }
logicalBackends := make(map[string]logical.Factory)
for backendName, backendFactory := range noopBackends {
logicalBackends[backendName] = backendFactory |
multitrait.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct | {
y: int
}
impl Cmp, ToString for S { //~ ERROR: expected `{`, found `,`
fn eq(&&other: S) { false }
fn to_string(&self) -> String { "hi".to_string() }
}
| S |
reference.d.ts | /**
* @license
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Reference } from '../src/reference';
import * as types from '@firebase/storage-types';
import { Metadata } from '../src/metadata';
import { StringFormat } from '../src/implementation/string';
import { ListOptions } from '../src/list';
import { StorageServiceCompat } from './service';
export declare class ReferenceCompat implements types.Reference {
| constructor(_delegate: Reference, storage: StorageServiceCompat);
get name(): string;
get bucket(): string;
get fullPath(): string;
toString(): string;
/**
* @returns A reference to the object obtained by
* appending childPath, removing any duplicate, beginning, or trailing
* slashes.
*/
child(childPath: string): types.Reference;
get root(): types.Reference;
/**
* @returns A reference to the parent of the
* current object, or null if the current object is the root.
*/
get parent(): types.Reference | null;
/**
* Uploads a blob to this object's location.
* @param data - The blob to upload.
* @returns An UploadTask that lets you control and
* observe the upload.
*/
put(data: Blob | Uint8Array | ArrayBuffer, metadata?: Metadata): types.UploadTask;
/**
* Uploads a string to this object's location.
* @param value - The string to upload.
* @param format - The format of the string to upload.
* @returns An UploadTask that lets you control and
* observe the upload.
*/
putString(value: string, format?: StringFormat, metadata?: Metadata): types.UploadTask;
/**
* List all items (files) and prefixes (folders) under this storage reference.
*
* This is a helper method for calling list() repeatedly until there are
* no more results. The default pagination size is 1000.
*
* Note: The results may not be consistent if objects are changed while this
* operation is running.
*
* Warning: listAll may potentially consume too many resources if there are
* too many results.
*
* @returns A Promise that resolves with all the items and prefixes under
* the current storage reference. `prefixes` contains references to
* sub-directories and `items` contains references to objects in this
* folder. `nextPageToken` is never returned.
*/
listAll(): Promise<types.ListResult>;
/**
* List items (files) and prefixes (folders) under this storage reference.
*
* List API is only available for Firebase Rules Version 2.
*
* GCS is a key-blob store. Firebase Storage imposes the semantic of '/'
* delimited folder structure. Refer to GCS's List API if you want to learn more.
*
* To adhere to Firebase Rules's Semantics, Firebase Storage does not
* support objects whose paths end with "/" or contain two consecutive
* "/"s. Firebase Storage List API will filter these unsupported objects.
* list() may fail if there are too many unsupported objects in the bucket.
*
* @param options - See ListOptions for details.
* @returns A Promise that resolves with the items and prefixes.
* `prefixes` contains references to sub-folders and `items`
* contains references to objects in this folder. `nextPageToken`
* can be used to get the rest of the results.
*/
list(options?: ListOptions | null): Promise<types.ListResult>;
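// Paging sketch (illustrative values): walk a folder one page at a time.
//
//   let page = await ref.list({ maxResults: 100 });
//   while (page.nextPageToken) {
//     page = await ref.list({ maxResults: 100, pageToken: page.nextPageToken });
//   }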
/**
* A promise that resolves with the metadata for this object. If this
* object doesn't exist or metadata cannot be retrieved, the promise is
* rejected.
*/
getMetadata(): Promise<Metadata>;
/**
* Updates the metadata for this object.
* @param metadata - The new metadata for the object.
* Only values that have been explicitly set will be changed. Explicitly
* setting a value to null will remove the metadata.
* @returns A promise that resolves
* with the new metadata for this object.
* @see firebaseStorage.Reference.prototype.getMetadata
*/
updateMetadata(metadata: Metadata): Promise<Metadata>;
/**
* @returns A promise that resolves with the download
* URL for this object.
*/
getDownloadURL(): Promise<string>;
/**
* Deletes the object at this location.
* @returns A promise that resolves if the deletion succeeds.
*/
delete(): Promise<void>;
private _throwIfRoot;
}
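// Usage sketch (illustrative only, not part of the generated declarations),
// assuming `ref` is a ReferenceCompat pointing at a folder:
//
//   const page = await ref.list({ maxResults: 100 });
//   if (page.nextPageToken) {
//     const rest = await ref.list({ pageToken: page.nextPageToken });
//   }
//   const everything = await ref.listAll(); // no paging; may be expensive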
utils.py | from __future__ import absolute_import, unicode_literals
from django.db.models import Case, IntegerField, When
def order_enum(field, members):
"""
Make an annotation value that can be used to sort by an enum field.
``field``
The name of an EnumChoiceField.
``members``
An iterable of Enum members in the order to sort by.
Use like:
.. code-block:: python
desired_order = [MyEnum.bar, MyEnum.baz, MyEnum.foo]
ChoiceModel.objects\\
.annotate(my_order=order_enum('choice', desired_order))\\
.order_by('my_order')
As Enums are iterable, ``members`` can be the Enum itself
if the default ordering is desired:
.. code-block:: python
ChoiceModel.objects\\
.annotate(my_order=order_enum('choice', MyEnum))\\
.order_by('my_order')
.. warning:: On Python 2, Enums may not have a consistent order,
depending upon how they were defined.
You can set an explicit order using ``__order__`` to fix this.
See the ``enum34`` docs for more information.
Any enum members not present in the list of members
will be sorted to the end of the results.
"""
members = list(members)
return Case(
*(When(**{field: member, 'then': i})
for i, member in enumerate(members)),
default=len(members),
        output_field=IntegerField())
About.js | import React from 'react';
import ReactDOM from 'react-dom';
export default class About extends React.Component {
  render(){
    return <div>Hello From About Component </div>
  }
}
instruction_sequence.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use ffi::{
BumpSliceMut,
Maybe::{self, Just, Nothing},
Pair, Slice, Str,
};
use hhbc_ast::*;
use iterator::Id as IterId;
use label::Label;
use local::Local;
use oxidized::ast_defs::Pos;
use runtime::TypedValue;
use thiserror::Error;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Error, Debug)]
pub enum Error {
#[error("IncludeTimeFatalException: FatalOp={0:?}, {1}")]
IncludeTimeFatalException(FatalOp, Pos, std::string::String),
#[error("Unrecoverable: {0}")]
Unrecoverable(std::string::String),
}
pub fn unrecoverable(msg: impl std::convert::Into<std::string::String>) -> Error {
Error::Unrecoverable(msg.into())
}
/// The various from_X functions below take some kind of AST
/// (expression, statement, etc.) and produce what is logically a
/// sequence of instructions. This could be represented by a list, but
/// we wish to avoid the quadratic complexity associated with repeated
/// appending. So, we build a tree of instructions which can be
/// flattened when complete.
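///
/// For instance (illustrative only), three already-built fragments can be
/// combined without copying any instructions:
///
/// ```ignore
/// let seq = InstrSeq::gather(alloc, vec![prologue, body, epilogue]);
/// ```
///
/// The result is a `Concat` node over the three fragments; the tree is
/// flattened once, when the final output is emitted.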
#[derive(Debug)]
#[repr(C)]
pub enum InstrSeq<'a> {
List(BumpSliceMut<'a, Instruct<'a>>),
Concat(BumpSliceMut<'a, InstrSeq<'a>>),
}
// The slices are mutable because of `rewrite_user_labels`. This means
// we can't derive `Clone` (because you can't have multiple mutable
// references referring to the same resource). It is possible to implement
// deep copy functionality though: see `InstrSeq::clone()` below.
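//
// For example (illustrative): `let copy = InstrSeq::clone(alloc, &seq);`
// rebuilds `seq` instruction by instruction in the given arena.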
impl<'a> std::convert::From<(&'a bumpalo::Bump, (InstrSeq<'a>, InstrSeq<'a>))> for InstrSeq<'a> {
fn from((alloc, (i1, i2)): (&'a bumpalo::Bump, (InstrSeq<'a>, InstrSeq<'a>))) -> InstrSeq<'a> {
InstrSeq::gather(alloc, vec![i1, i2])
}
}
#[derive(Debug)]
pub struct CompactIter<'i, 'a, I>
where
I: Iterator<Item = &'i Instruct<'a>>,
{
iter: I,
next: Option<&'i Instruct<'a>>,
}
impl<'i, 'a, I> CompactIter<'i, 'a, I>
where
I: Iterator<Item = &'i Instruct<'a>>,
{
pub fn new(i: I) -> Self {
Self {
iter: i,
next: None,
}
}
}
impl<'i, 'a, I> Iterator for CompactIter<'i, 'a, I>
where
I: Iterator<Item = &'i Instruct<'a>>,
{
type Item = &'i Instruct<'a>;
fn next(&mut self) -> Option<Self::Item> {
if self.next.is_some() {
            self.next.take()
} else {
let mut cur = self.iter.next();
match cur {
Some(i) if InstrSeq::is_srcloc(i) => {
self.next = self.iter.next();
while self.next.map_or(false, InstrSeq::is_srcloc) {
cur = self.next;
self.next = self.iter.next();
}
cur
}
_ => cur,
}
}
}
}
#[derive(Debug)]
pub struct InstrIter<'i, 'a> {
instr_seq: &'i InstrSeq<'a>,
index: usize,
concat_stack: std::vec::Vec<
itertools::Either<
(&'i BumpSliceMut<'a, Instruct<'a>>, usize),
(&'i BumpSliceMut<'a, InstrSeq<'a>>, usize),
>,
>,
}
impl<'i, 'a> InstrIter<'i, 'a> {
pub fn new(instr_seq: &'i InstrSeq<'a>) -> Self {
Self {
instr_seq,
index: 0,
concat_stack: std::vec::Vec::new(),
}
}
}
impl<'i, 'a> Iterator for InstrIter<'i, 'a> {
type Item = &'i Instruct<'a>;
fn next(&mut self) -> Option<Self::Item> {
//self: & mut InstrIter<'i, 'a>
//self.instr_seq: &'i InstrSeq<'a>
match self.instr_seq {
InstrSeq::List(s) if s.is_empty() => None,
InstrSeq::List(s) if s.len() == 1 && self.index > 0 => None,
InstrSeq::List(s) if s.len() == 1 => {
self.index += 1;
s.get(0)
}
InstrSeq::List(s) if s.len() > 1 && self.index >= s.len() => None,
InstrSeq::List(s) => {
let r = s.get(self.index);
self.index += 1;
r
}
InstrSeq::Concat(s) => {
if self.concat_stack.is_empty() {
if self.index == 0 {
self.index += 1;
self.concat_stack.push(itertools::Either::Right((s, 0)));
} else {
return None;
}
}
while !self.concat_stack.is_empty() {
let top: &mut itertools::Either<_, _> = self.concat_stack.last_mut().unwrap();
match top {
itertools::Either::Left((list, size)) if *size >= list.len() => {
self.concat_stack.pop();
}
itertools::Either::Left((list, size)) => {
let r: Option<&'i Instruct<'a>> = list.get(*size);
*size += 1;
return r;
}
itertools::Either::Right((concat, size)) if *size >= concat.len() => {
self.concat_stack.pop();
}
itertools::Either::Right((concat, size)) => {
let i: &'i InstrSeq<'a> = &(concat[*size]);
*size += 1;
match i {
InstrSeq::List(s) if s.is_empty() => {}
InstrSeq::List(s) if s.len() == 1 => {
return s.get(0);
}
InstrSeq::List(s) => {
self.concat_stack.push(itertools::Either::Left((s, 0)));
}
InstrSeq::Concat(s) => {
self.concat_stack.push(itertools::Either::Right((s, 0)));
}
}
}
}
}
None
}
}
}
}
pub mod instr {
use crate::*;
pub fn empty<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
InstrSeq::new_empty(alloc)
}
pub fn instr<'a>(alloc: &'a bumpalo::Bump, i: Instruct<'a>) -> InstrSeq<'a> {
InstrSeq::new_singleton(alloc, i)
}
pub fn instrs<'a>(alloc: &'a bumpalo::Bump, is: &'a mut [Instruct<'a>]) -> InstrSeq<'a> {
InstrSeq::new_list(alloc, is)
}
pub fn lit_const<'a>(alloc: &'a bumpalo::Bump, l: InstructLitConst<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(l))
}
pub fn iterinit<'a>(
alloc: &'a bumpalo::Bump,
args: IterArgs<'a>,
label: Label,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIterator(InstructIterator::IterInit(args, label)),
)
}
pub fn iternext<'a>(
alloc: &'a bumpalo::Bump,
args: IterArgs<'a>,
label: Label,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIterator(InstructIterator::IterNext(args, label)),
)
}
pub fn iternextk<'a>(
alloc: &'a bumpalo::Bump,
id: IterId,
label: Label,
value: Local<'a>,
key: Local<'a>,
) -> InstrSeq<'a> {
let args = IterArgs {
iter_id: id,
key_id: Just(key),
val_id: value,
};
instr(
alloc,
Instruct::IIterator(InstructIterator::IterNext(args, label)),
)
}
pub fn iterfree<'a>(alloc: &'a bumpalo::Bump, id: IterId) -> InstrSeq<'a> {
instr(alloc, Instruct::IIterator(InstructIterator::IterFree(id)))
}
pub fn whresult<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IAsync(AsyncFunctions::WHResult))
}
pub fn jmp<'a>(alloc: &'a bumpalo::Bump, label: Label) -> InstrSeq<'a> {
instr(alloc, Instruct::IContFlow(InstructControlFlow::Jmp(label)))
}
pub fn jmpz<'a>(alloc: &'a bumpalo::Bump, label: Label) -> InstrSeq<'a> {
instr(alloc, Instruct::IContFlow(InstructControlFlow::JmpZ(label)))
}
pub fn jmpnz<'a>(alloc: &'a bumpalo::Bump, label: Label) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IContFlow(InstructControlFlow::JmpNZ(label)),
)
}
pub fn jmpns<'a>(alloc: &'a bumpalo::Bump, label: Label) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IContFlow(InstructControlFlow::JmpNS(label)),
)
}
pub fn continue_<'a>(alloc: &'a bumpalo::Bump, level: isize) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ISpecialFlow(InstructSpecialFlow::Continue(level)),
)
}
pub fn break_<'a>(alloc: &'a bumpalo::Bump, level: isize) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ISpecialFlow(InstructSpecialFlow::Break(level)),
)
}
    pub fn iter_break<'a>(
alloc: &'a bumpalo::Bump,
label: Label,
itrs: std::vec::Vec<IterId>,
) -> InstrSeq<'a> {
let mut vec = bumpalo::collections::Vec::from_iter_in(
itrs.into_iter()
.map(|id| Instruct::IIterator(InstructIterator::IterFree(id))),
alloc,
);
vec.push(Instruct::IContFlow(InstructControlFlow::Jmp(label)));
instrs(alloc, vec.into_bump_slice_mut())
}
pub fn false_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::False))
}
pub fn true_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::True))
}
pub fn clscnsd<'a>(
alloc: &'a bumpalo::Bump,
const_id: ConstId<'a>,
cid: ClassId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::ClsCnsD(const_id, cid)),
)
}
pub fn clscns<'a>(alloc: &'a bumpalo::Bump, const_id: ConstId<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::ClsCns(const_id)),
)
}
pub fn clscnsl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::ClsCnsL(local)))
}
pub fn eq<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Eq))
}
pub fn neq<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Neq))
}
pub fn gt<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Gt))
}
pub fn gte<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Gte))
}
pub fn lt<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Lt))
}
pub fn lte<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Lte))
}
pub fn concat<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Concat))
}
pub fn concatn<'a>(alloc: &'a bumpalo::Bump, n: isize) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::ConcatN(n)))
}
pub fn print<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Print))
}
pub fn cast_dict<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::CastDict))
}
pub fn cast_string<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::CastString))
}
pub fn cast_int<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::CastInt))
}
pub fn cast_bool<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::CastBool))
}
pub fn cast_double<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::CastDouble))
}
pub fn retc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IContFlow(InstructControlFlow::RetC))
}
pub fn retc_suspended<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IContFlow(InstructControlFlow::RetCSuspended),
)
}
pub fn retm<'a>(alloc: &'a bumpalo::Bump, p: NumParams) -> InstrSeq<'a> {
instr(alloc, Instruct::IContFlow(InstructControlFlow::RetM(p)))
}
pub fn null<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::Null))
}
pub fn nulluninit<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::NullUninit))
}
pub fn chain_faults<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::ChainFaults))
}
pub fn dup<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBasic(InstructBasic::Dup))
}
pub fn nop<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBasic(InstructBasic::Nop))
}
pub fn instanceofd<'a>(alloc: &'a bumpalo::Bump, s: ClassId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::InstanceOfD(s)))
}
pub fn instanceof<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::InstanceOf))
}
pub fn islateboundcls<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::IsLateBoundCls))
}
pub fn istypestructc<'a>(alloc: &'a bumpalo::Bump, mode: TypeStructResolveOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::IsTypeStructC(mode)))
}
pub fn throwastypestructexception<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ThrowAsTypeStructException),
)
}
pub fn throw_non_exhaustive_switch<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::ThrowNonExhaustiveSwitch),
)
}
pub fn raise_class_string_conversion_warning<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::RaiseClassStringConversionWarning),
)
}
pub fn combine_and_resolve_type_struct<'a>(alloc: &'a bumpalo::Bump, i: isize) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::CombineAndResolveTypeStruct(i)),
)
}
pub fn record_reified_generic<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::RecordReifiedGeneric))
}
pub fn check_reified_generic_mismatch<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::CheckReifiedGenericMismatch),
)
}
pub fn int<'a>(alloc: &'a bumpalo::Bump, i: isize) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::Int(i.try_into().unwrap())),
)
}
pub fn int64<'a>(alloc: &'a bumpalo::Bump, i: i64) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::Int(i)))
}
pub fn int_of_string<'a>(alloc: &'a bumpalo::Bump, litstr: &str) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::Int(litstr.parse::<i64>().unwrap())),
)
}
pub fn double<'a>(alloc: &'a bumpalo::Bump, litstr: &str) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::Double(Str::from(
bumpalo::collections::String::from_str_in(litstr, alloc).into_bump_str(),
))),
)
}
pub fn string<'a>(alloc: &'a bumpalo::Bump, litstr: impl Into<String>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::String(Str::from(
bumpalo::collections::String::from_str_in(litstr.into().as_str(), alloc)
.into_bump_str(),
))),
)
}
pub fn this<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::This))
}
pub fn istypec<'a>(alloc: &'a bumpalo::Bump, op: IsTypeOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IsTypeC(op)))
}
pub fn istypel<'a>(alloc: &'a bumpalo::Bump, id: Local<'a>, op: IsTypeOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IsTypeL(id, op)))
}
pub fn add<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Add))
}
pub fn addo<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::AddO))
}
pub fn sub<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Sub))
}
pub fn subo<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::SubO))
}
pub fn mul<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Mul))
}
pub fn mulo<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::MulO))
}
pub fn shl<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Shl))
}
pub fn shr<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Shr))
}
pub fn cmp<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Cmp))
}
pub fn mod_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Mod))
}
pub fn div<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Div))
}
pub fn same<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Same))
}
pub fn pow<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Pow))
}
pub fn nsame<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::NSame))
}
pub fn not<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Not))
}
pub fn bitnot<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::BitNot))
}
pub fn bitand<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::BitAnd))
}
pub fn bitor<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::BitOr))
}
pub fn bitxor<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::BitXor))
}
pub fn sets<'a>(alloc: &'a bumpalo::Bump, readonly_op: ReadonlyOp) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMutator(InstructMutator::SetS(readonly_op)),
)
}
pub fn setl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::SetL(local)))
}
pub fn setg<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::SetG))
}
pub fn unsetl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::UnsetL(local)))
}
pub fn unsetg<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::UnsetG))
}
pub fn incdecl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>, op: IncDecOp) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMutator(InstructMutator::IncDecL(local, op)),
)
}
pub fn incdecg<'a>(alloc: &'a bumpalo::Bump, op: IncDecOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::IncDecG(op)))
}
pub fn incdecs<'a>(alloc: &'a bumpalo::Bump, op: IncDecOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::IncDecS(op)))
}
pub fn setopg<'a>(alloc: &'a bumpalo::Bump, op: EqOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::SetOpG(op)))
}
pub fn setopl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>, op: EqOp) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMutator(InstructMutator::SetOpL(local, op)),
)
}
pub fn setops<'a>(alloc: &'a bumpalo::Bump, op: EqOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::SetOpS(op)))
}
pub fn issetl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IssetL(local)))
}
pub fn issetg<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IssetG))
}
pub fn issets<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IssetS))
}
pub fn isunsetl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IIsset(InstructIsset::IsUnsetL(local)))
}
pub fn cgets<'a>(alloc: &'a bumpalo::Bump, readonly_op: ReadonlyOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CGetS(readonly_op)))
}
pub fn cgetg<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CGetG))
}
pub fn cgetl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CGetL(local)))
}
pub fn cugetl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CUGetL(local)))
}
pub fn cgetl2<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CGetL2(local)))
}
pub fn cgetquietl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::CGetQuietL(local)))
}
pub fn classgetc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::ClassGetC))
}
pub fn classgetts<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::ClassGetTS))
}
pub fn classname<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::ClassName))
}
pub fn lazyclassfromclass<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::LazyClassFromClass))
}
pub fn self_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::Self_))
}
pub fn lateboundcls<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::LateBoundCls))
}
pub fn parent<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::Parent))
}
pub fn popu<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBasic(InstructBasic::PopU))
}
pub fn popc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBasic(InstructBasic::PopC))
}
pub fn popl<'a>(alloc: &'a bumpalo::Bump, l: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::PopL(l)))
}
pub fn initprop<'a>(alloc: &'a bumpalo::Bump, pid: PropId<'a>, op: InitPropOp) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMutator(InstructMutator::InitProp(pid, op)),
)
}
pub fn checkprop<'a>(alloc: &'a bumpalo::Bump, pid: PropId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMutator(InstructMutator::CheckProp(pid)))
}
pub fn pushl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IGet(InstructGet::PushL(local)))
}
pub fn throw<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IContFlow(InstructControlFlow::Throw))
}
pub fn new_vec_array<'a>(alloc: &'a bumpalo::Bump, i: isize) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::NewVec(i)))
}
pub fn new_pair<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::NewPair))
}
pub fn add_elemc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::AddElemC))
}
pub fn add_new_elemc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::AddNewElemC))
}
pub fn switch<'a>(
alloc: &'a bumpalo::Bump,
targets: bumpalo::collections::Vec<'a, Label>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IContFlow(InstructControlFlow::Switch {
kind: SwitchKind::Unbounded,
base: 0,
targets: BumpSliceMut::new(alloc, targets.into_bump_slice_mut()),
}),
)
}
pub fn newobj<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::NewObj))
}
pub fn sswitch<'a>(
alloc: &'a bumpalo::Bump,
cases: bumpalo::collections::Vec<'a, (&'a str, Label)>,
) -> InstrSeq<'a> {
let targets = BumpSliceMut::new(
alloc,
alloc.alloc_slice_fill_iter(cases.iter().map(|(_, target)| *target)),
);
let cases = BumpSliceMut::new(
alloc,
alloc.alloc_slice_fill_iter(cases.into_iter().map(|(s, _)| Str::from(s))),
);
instr(
alloc,
Instruct::IContFlow(InstructControlFlow::SSwitch { cases, targets }),
)
}
pub fn newobjr<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::NewObjR))
}
pub fn newobjd<'a>(alloc: &'a bumpalo::Bump, id: ClassId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::NewObjD(id)))
}
pub fn newobjrd<'a>(alloc: &'a bumpalo::Bump, id: ClassId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::NewObjRD(id)))
}
pub fn newobjs<'a>(alloc: &'a bumpalo::Bump, scref: SpecialClsRef) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::NewObjS(scref)))
}
pub fn lockobj<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::LockObj))
}
pub fn clone<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Clone))
}
pub fn new_record<'a>(
alloc: &'a bumpalo::Bump,
id: ClassId<'a>,
keys: &'a [&'a str],
) -> InstrSeq<'a> {
let keys = Slice::new(alloc.alloc_slice_fill_iter(keys.iter().map(|s| Str::from(*s))));
instr(
alloc,
Instruct::ILitConst(InstructLitConst::NewRecord(id, keys)),
)
}
pub fn newstructdict<'a>(alloc: &'a bumpalo::Bump, keys: &'a [&'a str]) -> InstrSeq<'a> {
let keys = Slice::new(alloc.alloc_slice_fill_iter(keys.iter().map(|s| Str::from(*s))));
instr(
alloc,
Instruct::ILitConst(InstructLitConst::NewStructDict(keys)),
)
}
pub fn newcol<'a>(alloc: &'a bumpalo::Bump, collection_type: CollectionType) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::NewCol(collection_type)),
)
}
pub fn colfromarray<'a>(
alloc: &'a bumpalo::Bump,
collection_type: CollectionType,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::ColFromArray(collection_type)),
)
}
pub fn entrynop<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBasic(InstructBasic::EntryNop))
}
pub fn typedvalue<'a>(alloc: &'a bumpalo::Bump, xs: TypedValue<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ILitConst(InstructLitConst::TypedValue(xs)))
}
pub fn basel<'a>(
alloc: &'a bumpalo::Bump,
local: Local<'a>,
mode: MemberOpMode,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IBase(InstructBase::BaseL(local, mode, readonly_op)),
)
}
pub fn basec<'a>(
alloc: &'a bumpalo::Bump,
stack_index: StackIndex,
mode: MemberOpMode,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IBase(InstructBase::BaseC(stack_index, mode)),
)
}
pub fn basegc<'a>(
alloc: &'a bumpalo::Bump,
stack_index: StackIndex,
mode: MemberOpMode,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IBase(InstructBase::BaseGC(stack_index, mode)),
)
}
pub fn basegl<'a>(
alloc: &'a bumpalo::Bump,
local: Local<'a>,
mode: MemberOpMode,
) -> InstrSeq<'a> {
instr(alloc, Instruct::IBase(InstructBase::BaseGL(local, mode)))
}
pub fn basesc<'a>(
alloc: &'a bumpalo::Bump,
y: StackIndex,
z: StackIndex,
mode: MemberOpMode,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IBase(InstructBase::BaseSC(y, z, mode, readonly_op)),
)
}
pub fn baseh<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IBase(InstructBase::BaseH))
}
pub fn cgetcunop<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::CGetCUNop))
}
pub fn ugetcunop<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::UGetCUNop))
}
    pub fn memoget<'a>(
        alloc: &'a bumpalo::Bump,
        label: Label,
        range: Option<(Local<'a>, isize)>,
    ) -> InstrSeq<'a> {
        instr(
            alloc,
            Instruct::IMisc(InstructMisc::MemoGet(label, range_opt_to_maybe(range))),
        )
    }
// Factored out to reduce verbosity.
fn range_opt_to_maybe<'a>(range: Option<(Local<'a>, isize)>) -> Maybe<Pair<Local<'a>, isize>> {
match range {
Some((fst, snd)) => Just(Pair(fst, snd)),
None => Nothing,
}
}
pub fn memoget_eager<'a>(
alloc: &'a bumpalo::Bump,
label1: Label,
label2: Label,
range: Option<(Local<'a>, isize)>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::MemoGetEager(
[label1, label2],
range_opt_to_maybe(range),
)),
)
}
pub fn memoset<'a>(
alloc: &'a bumpalo::Bump,
range: Option<(Local<'a>, isize)>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::MemoSet(range_opt_to_maybe(range))),
)
}
pub fn memoset_eager<'a>(
alloc: &'a bumpalo::Bump,
range: Option<(Local<'a>, isize)>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::MemoSetEager(range_opt_to_maybe(range))),
)
}
pub fn getmemokeyl<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::GetMemoKeyL(local)))
}
pub fn barethis<'a>(alloc: &'a bumpalo::Bump, notice: BareThisOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::BareThis(notice)))
}
pub fn checkthis<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::CheckThis))
}
pub fn verify_ret_type_c<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::VerifyRetTypeC))
}
pub fn verify_ret_type_ts<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::VerifyRetTypeTS))
}
pub fn verify_out_type<'a>(alloc: &'a bumpalo::Bump, i: ParamId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::VerifyOutType(i)))
}
pub fn verify_param_type<'a>(alloc: &'a bumpalo::Bump, i: ParamId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::VerifyParamType(i)))
}
pub fn verify_param_type_ts<'a>(alloc: &'a bumpalo::Bump, i: ParamId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::VerifyParamTypeTS(i)))
}
pub fn dim<'a>(alloc: &'a bumpalo::Bump, op: MemberOpMode, key: MemberKey<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IBase(InstructBase::Dim(op, key)))
}
pub fn dim_warn_pt<'a>(
alloc: &'a bumpalo::Bump,
key: PropId<'a>,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
dim(alloc, MemberOpMode::Warn, MemberKey::PT(key, readonly_op))
}
pub fn dim_define_pt<'a>(
alloc: &'a bumpalo::Bump,
key: PropId<'a>,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
dim(alloc, MemberOpMode::Define, MemberKey::PT(key, readonly_op))
}
pub fn fcallclsmethod<'a>(
alloc: &'a bumpalo::Bump,
log: IsLogAsDynamicCallOp,
fcall_args: FcallArgs<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallClsMethod { fcall_args, log }),
)
}
pub fn fcallclsmethodd<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
method: MethodId<'a>,
class: ClassId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallClsMethodD {
fcall_args,
class,
method,
}),
)
}
pub fn fcallclsmethods<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
clsref: SpecialClsRef,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallClsMethodS { fcall_args, clsref }),
)
}
pub fn fcallclsmethodsd<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
clsref: SpecialClsRef,
method: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallClsMethodSD {
fcall_args,
clsref,
method,
}),
)
}
pub fn fcallctor<'a>(alloc: &'a bumpalo::Bump, fcall_args: FcallArgs<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::FCallCtor(fcall_args)))
}
pub fn fcallfunc<'a>(alloc: &'a bumpalo::Bump, fcall_args: FcallArgs<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::ICall(InstructCall::FCallFunc(fcall_args)))
}
pub fn fcallfuncd<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
func: FunctionId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallFuncD { fcall_args, func }),
)
}
pub fn fcallobjmethod<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
flavor: ObjNullFlavor,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallObjMethod { fcall_args, flavor }),
)
}
pub fn fcallobjmethodd<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
method: MethodId<'a>,
flavor: ObjNullFlavor,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ICall(InstructCall::FCallObjMethodD {
fcall_args,
flavor,
method,
}),
)
}
pub fn fcallobjmethodd_nullthrows<'a>(
alloc: &'a bumpalo::Bump,
fcall_args: FcallArgs<'a>,
method: MethodId<'a>,
) -> InstrSeq<'a> {
fcallobjmethodd(alloc, fcall_args, method, ObjNullFlavor::NullThrows)
}
pub fn querym<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
op: QueryOp,
key: MemberKey<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IFinal(InstructFinal::QueryM(num_params, op, key)),
)
}
pub fn querym_cget_pt<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
key: PropId<'a>,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
querym(
alloc,
num_params,
QueryOp::CGet,
MemberKey::PT(key, readonly_op),
)
}
pub fn setm<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
key: MemberKey<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IFinal(InstructFinal::SetM(num_params, key)),
)
}
pub fn unsetm<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
key: MemberKey<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IFinal(InstructFinal::UnsetM(num_params, key)),
)
}
pub fn setopm<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
op: EqOp,
key: MemberKey<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IFinal(InstructFinal::SetOpM(num_params, op, key)),
)
}
pub fn incdecm<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
op: IncDecOp,
key: MemberKey<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IFinal(InstructFinal::IncDecM(num_params, op, key)),
)
}
pub fn setm_pt<'a>(
alloc: &'a bumpalo::Bump,
num_params: NumParams,
key: PropId<'a>,
readonly_op: ReadonlyOp,
) -> InstrSeq<'a> {
setm(alloc, num_params, MemberKey::PT(key, readonly_op))
}
pub fn resolve_func<'a>(alloc: &'a bumpalo::Bump, func_id: FunctionId<'a>) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::ResolveFunc(func_id)))
}
pub fn resolve_rfunc<'a>(alloc: &'a bumpalo::Bump, func_id: FunctionId<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveRFunc(func_id)),
)
}
pub fn resolveclsmethod<'a>(alloc: &'a bumpalo::Bump, method_id: MethodId<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveClsMethod(method_id)),
)
}
pub fn resolveclsmethodd<'a>(
alloc: &'a bumpalo::Bump,
class_id: ClassId<'a>,
method_id: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveClsMethodD(class_id, method_id)),
)
}
pub fn resolveclsmethods<'a>(
alloc: &'a bumpalo::Bump,
scref: SpecialClsRef,
method_id: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveClsMethodS(scref, method_id)),
)
}
pub fn resolverclsmethod<'a>(
alloc: &'a bumpalo::Bump,
method_id: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveRClsMethod(method_id)),
)
}
pub fn resolverclsmethodd<'a>(
alloc: &'a bumpalo::Bump,
class_id: ClassId<'a>,
method_id: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveRClsMethodD(class_id, method_id)),
)
}
pub fn resolverclsmethods<'a>(
alloc: &'a bumpalo::Bump,
scref: SpecialClsRef,
method_id: MethodId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveRClsMethodS(scref, method_id)),
)
}
pub fn resolve_meth_caller<'a>(
alloc: &'a bumpalo::Bump,
fun_id: FunctionId<'a>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveMethCaller(fun_id)),
)
}
pub fn resolveclass<'a>(alloc: &'a bumpalo::Bump, class_id: ClassId<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::ResolveClass(class_id)),
)
}
pub fn lazyclass<'a>(alloc: &'a bumpalo::Bump, class_id: ClassId<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ILitConst(InstructLitConst::LazyClass(class_id)),
)
}
pub fn oodeclexists<'a>(alloc: &'a bumpalo::Bump, class_kind: ClassishKind) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::OODeclExists(class_kind)),
)
}
pub fn fatal<'a>(alloc: &'a bumpalo::Bump, op: FatalOp) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Fatal(op)))
}
pub fn await_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IAsync(AsyncFunctions::Await))
}
pub fn yield_<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::Yield))
}
pub fn yieldk<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::YieldK))
}
pub fn createcont<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IGenerator(GenCreationExecution::CreateCont),
)
}
pub fn awaitall<'a>(
alloc: &'a bumpalo::Bump,
range: Option<(Local<'a>, isize)>,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IAsync(AsyncFunctions::AwaitAll(range_opt_to_maybe(range))),
)
}
pub fn label<'a>(alloc: &'a bumpalo::Bump, label: Label) -> InstrSeq<'a> {
instr(alloc, Instruct::ILabel(label))
}
pub fn awaitall_list<'a>(
alloc: &'a bumpalo::Bump,
unnamed_locals: std::vec::Vec<Local<'a>>,
) -> InstrSeq<'a> {
use Local::Unnamed;
match unnamed_locals.split_first() {
None => panic!("Expected at least one await"),
Some((hd, tl)) => {
if let Unnamed(hd_id) = hd {
let mut prev_id = hd_id;
for unnamed_local in tl.iter() {
match unnamed_local {
Unnamed(id) => {
assert_eq!(*prev_id + 1, *id);
prev_id = id;
}
_ => panic!("Expected unnamed local"),
}
}
awaitall(
alloc,
Some((Unnamed(*hd_id), unnamed_locals.len().try_into().unwrap())),
)
} else {
panic!("Expected unnamed local")
}
}
}
}
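    // For example (illustrative): passing locals Unnamed(3), Unnamed(4) and
    // Unnamed(5) to `awaitall_list` emits `AwaitAll` over the contiguous
    // range starting at Unnamed(3) with length 3.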
pub fn exit<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IOp(InstructOperator::Exit))
}
pub fn idx<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::Idx))
}
pub fn array_idx<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::ArrayIdx))
}
pub fn createcl<'a>(
alloc: &'a bumpalo::Bump,
param_num: NumParams,
cls_num: ClassNum,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::CreateCl(param_num, cls_num)),
)
}
pub fn eval<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::Eval),
)
}
pub fn incl<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::Incl),
)
}
pub fn inclonce<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::InclOnce),
)
}
pub fn req<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::Req),
)
}
pub fn reqdoc<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::ReqDoc),
)
}
pub fn reqonce<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IIncludeEvalDefine(InstructIncludeEvalDefine::ReqOnce),
)
}
pub fn silence_start<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::Silence(local, OpSilence::Start)),
)
}
pub fn silence_end<'a>(alloc: &'a bumpalo::Bump, local: Local<'a>) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IMisc(InstructMisc::Silence(local, OpSilence::End)),
)
}
pub fn contcheck_check<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IGenerator(GenCreationExecution::ContCheck(CheckStarted::CheckStarted)),
)
}
pub fn contcheck_ignore<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IGenerator(GenCreationExecution::ContCheck(CheckStarted::IgnoreStarted)),
)
}
pub fn contenter<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::ContEnter))
}
pub fn contraise<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::ContRaise))
}
pub fn contvalid<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::ContValid))
}
pub fn contcurrent<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IGenerator(GenCreationExecution::ContCurrent),
)
}
pub fn contkey<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IGenerator(GenCreationExecution::ContKey))
}
pub fn contgetreturn<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IGenerator(GenCreationExecution::ContGetReturn),
)
}
pub fn nativeimpl<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(alloc, Instruct::IMisc(InstructMisc::NativeImpl))
}
pub fn srcloc<'a>(
alloc: &'a bumpalo::Bump,
line_begin: isize,
line_end: isize,
col_begin: isize,
col_end: isize,
) -> InstrSeq<'a> {
instr(
alloc,
Instruct::ISrcLoc(SrcLoc {
line_begin,
line_end,
col_begin,
col_end,
}),
)
}
pub fn is_type_structc_resolve<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::IsTypeStructC(
TypeStructResolveOp::Resolve,
)),
)
}
pub fn is_type_structc_dontresolve<'a>(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
instr(
alloc,
Instruct::IOp(InstructOperator::IsTypeStructC(
TypeStructResolveOp::DontResolve,
)),
)
}
}
impl<'a> InstrSeq<'a> {
    /// We can't implement `std::clone::Clone` because of the need for an
    /// allocator. Instead, use this associated function.
pub fn clone(alloc: &'a bumpalo::Bump, s: &InstrSeq<'a>) -> InstrSeq<'a> {
InstrSeq::from_iter_in(alloc, InstrIter::new(s).cloned())
}
/// We can't implement `std::Default` because of the need
/// for an allocator. Instead, use this associated function
/// to produce an empty instruction sequence.
pub fn new_empty(alloc: &'a bumpalo::Bump) -> InstrSeq<'a> {
InstrSeq::List(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; ].into_bump_slice_mut(),
))
}
/// An instruction sequence of a single instruction.
pub fn new_singleton(alloc: &'a bumpalo::Bump, i: Instruct<'a>) -> InstrSeq<'a> {
InstrSeq::List(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; i].into_bump_slice_mut(),
))
}
/// An instruction sequence of a sequence of instructions.
pub fn new_list(alloc: &'a bumpalo::Bump, is: &'a mut [Instruct<'a>]) -> InstrSeq<'a> {
InstrSeq::List(BumpSliceMut::new(alloc, is))
}
/// An instruction sequence of a concatenation of instruction sequences.
pub fn new_concat(alloc: &'a bumpalo::Bump, iss: &'a mut [InstrSeq<'a>]) -> InstrSeq<'a> {
InstrSeq::Concat(BumpSliceMut::new(alloc, iss))
}
/// Move instructions out of a container.
pub fn from_iter_in<T: IntoIterator<Item = Instruct<'a>>>(
alloc: &'a bumpalo::Bump,
it: T,
) -> InstrSeq<'a> {
InstrSeq::new_list(
alloc,
bumpalo::collections::Vec::from_iter_in(it, alloc).into_bump_slice_mut(),
)
}
/// Transitional version. We mean to write a `gather!` in the future.
pub fn gather(alloc: &'a bumpalo::Bump, iss: std::vec::Vec<InstrSeq<'a>>) -> InstrSeq<'a> {
fn prd<'a>(is: &InstrSeq<'a>) -> bool {
match is {
InstrSeq::List(s) if s.is_empty() => false,
_ => true,
}
}
let non_empty = bumpalo::collections::Vec::from_iter_in(iss.into_iter().filter(prd), alloc);
if non_empty.is_empty() {
InstrSeq::new_empty(alloc)
} else {
InstrSeq::new_concat(alloc, non_empty.into_bump_slice_mut())
}
}
pub fn iter<'i>(&'i self) -> InstrIter<'i, 'a> {
InstrIter::new(self)
}
pub fn compact_iter<'i>(&'i self) -> impl Iterator<Item = &Instruct<'a>> {
CompactIter::new(self.iter())
}
pub fn create_try_catch(
alloc: &'a bumpalo::Bump,
label_gen: &mut label::Gen,
opt_done_label: Option<Label>,
skip_throw: bool,
try_instrs: Self,
catch_instrs: Self,
) -> Self {
let done_label = match opt_done_label {
Some(l) => l,
None => label_gen.next_regular(),
};
InstrSeq::gather(
alloc,
vec![
instr::instr(alloc, Instruct::ITry(InstructTry::TryCatchBegin)),
try_instrs,
instr::jmp(alloc, done_label),
instr::instr(alloc, Instruct::ITry(InstructTry::TryCatchMiddle)),
catch_instrs,
if skip_throw {
instr::empty(alloc)
} else {
instr::instr(alloc, Instruct::IContFlow(InstructControlFlow::Throw))
},
instr::instr(alloc, Instruct::ITry(InstructTry::TryCatchEnd)),
instr::label(alloc, done_label),
],
)
}
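    // For reference, `create_try_catch` above emits (conceptually):
    //
    //   TryCatchBegin; <try>; Jmp done; TryCatchMiddle; <catch>; [Throw];
    //   TryCatchEnd; done:
    //
    // where `done` is `opt_done_label` if given, or a freshly generated label.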
/// Test whether `i` is of case `Instruct::ISrcLoc`.
    fn is_srcloc(instruction: &Instruct<'a>) -> bool {
        matches!(instruction, Instruct::ISrcLoc(_))
    }
pub fn first(&self) -> Option<&Instruct<'a>> {
// self: &InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => None,
InstrSeq::List(s) if s.len() == 1 => {
let i = &s[0];
if InstrSeq::is_srcloc(i) {
None
} else {
Some(i)
}
}
            InstrSeq::List(s) => s.iter().find(|&i| !InstrSeq::is_srcloc(i)),
InstrSeq::Concat(s) => s.iter().find_map(InstrSeq::first),
}
}
/// Test for the empty instruction sequence.
pub fn is_empty(&self) -> bool {
// self:&InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => true,
InstrSeq::List(s) if s.len() == 1 => InstrSeq::is_srcloc(&s[0]),
InstrSeq::List(s) => s.is_empty() || s.iter().all(InstrSeq::is_srcloc),
InstrSeq::Concat(s) => s.iter().all(InstrSeq::is_empty),
}
}
pub fn flat_map_seq<F>(&self, alloc: &'a bumpalo::Bump, f: &mut F) -> Self
where
F: FnMut(&Instruct<'a>) -> Self,
{
// self: &InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => InstrSeq::new_empty(alloc),
InstrSeq::List(s) if s.len() == 1 => f(&s[0]),
InstrSeq::List(s) => InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::collections::vec::Vec::from_iter_in(s.iter().map(f), alloc)
.into_bump_slice_mut(),
)),
InstrSeq::Concat(s) => InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::collections::vec::Vec::from_iter_in(
s.iter().map(|x| x.flat_map_seq(alloc, f)),
alloc,
)
.into_bump_slice_mut(),
)),
}
}
pub fn filter_map<F>(&self, alloc: &'a bumpalo::Bump, f: &mut F) -> Self
where
F: FnMut(&Instruct<'a>) -> Option<Instruct<'a>>,
{
//self: &InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => InstrSeq::new_empty(alloc),
InstrSeq::List(s) if s.len() == 1 => {
let x: &Instruct<'a> = &s[0];
match f(x) {
Some(x) => instr::instr(alloc, x),
None => InstrSeq::new_empty(alloc),
}
}
InstrSeq::List(s) => InstrSeq::List(BumpSliceMut::new(
alloc,
bumpalo::collections::vec::Vec::from_iter_in(s.iter().filter_map(f), alloc)
.into_bump_slice_mut(),
)),
InstrSeq::Concat(s) => InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::collections::vec::Vec::from_iter_in(
s.iter().map(|x| x.filter_map(alloc, f)),
alloc,
)
.into_bump_slice_mut(),
)),
}
}
pub fn filter_map_mut<F>(&mut self, alloc: &'a bumpalo::Bump, f: &mut F)
where
F: FnMut(&mut Instruct<'a>) -> bool,
{
//self: &mut InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => {}
InstrSeq::List(s) if s.len() == 1 => {
let x: &mut Instruct<'a> = &mut s[0];
if !f(x) {
*self = instr::empty(alloc)
}
}
InstrSeq::List(s) => {
let mut new_lst = bumpalo::vec![in alloc;];
for i in s.iter_mut() {
if f(i) {
new_lst.push(i.clone())
}
}
*self = instr::instrs(alloc, new_lst.into_bump_slice_mut())
}
InstrSeq::Concat(s) => s.iter_mut().for_each(|x| x.filter_map_mut(alloc, f)),
}
}
pub fn map_mut<F>(&mut self, f: &mut F)
where
F: FnMut(&mut Instruct<'a>),
{
//self: &mut InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => {}
InstrSeq::List(s) if s.len() == 1 => f(&mut s[0]),
InstrSeq::List(s) => s.iter_mut().for_each(f),
InstrSeq::Concat(s) => s.iter_mut().for_each(|x| x.map_mut(f)),
}
}
#[allow(clippy::result_unit_err)]
pub fn map_result_mut<F>(&mut self, f: &mut F) -> Result<()>
where
F: FnMut(&mut Instruct<'a>) -> Result<()>,
{
//self: &mut InstrSeq<'a>
match self {
InstrSeq::List(s) if s.is_empty() => Ok(()),
InstrSeq::List(s) if s.len() == 1 => f(&mut s[0]),
InstrSeq::List(s) => s.iter_mut().try_for_each(f),
InstrSeq::Concat(s) => s.iter_mut().try_for_each(|x| x.map_result_mut(f)),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::instr::{instr, instrs};
use pretty_assertions::assert_eq;
#[test]
fn iter() {
let a = bumpalo::Bump::new();
let alloc: &bumpalo::Bump = &a;
let mk_i = || Instruct::IComment(Str::from(""));
let empty = || InstrSeq::new_empty(alloc);
let one = || instr(alloc, mk_i());
let list0 = || instrs(alloc, bumpalo::vec![in alloc;].into_bump_slice_mut());
let list1 = || instrs(alloc, bumpalo::vec![in alloc; mk_i()].into_bump_slice_mut());
let list2 = || {
instrs(
alloc,
bumpalo::vec![in alloc; mk_i(), mk_i()].into_bump_slice_mut(),
)
};
let concat0 = || {
InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc;].into_bump_slice_mut(),
))
};
let concat1 = || {
InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; one()].into_bump_slice_mut(),
))
};
assert_eq!(empty().iter().count(), 0);
assert_eq!(one().iter().count(), 1);
assert_eq!(list0().iter().count(), 0);
assert_eq!(list1().iter().count(), 1);
assert_eq!(list2().iter().count(), 2);
assert_eq!(concat0().iter().count(), 0);
assert_eq!(concat1().iter().count(), 1);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; empty()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 0);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; empty(), one()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 1);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; one(), empty()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 1);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; one(), list1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 2);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; list2(), list1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 3);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; concat0(), list2(), list1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 3);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; concat1(), concat1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 2);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; concat0(), concat1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 1);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; list2(), concat1()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 3);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; list2(), concat0()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 2);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; one(), concat0()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 1);
let concat = InstrSeq::Concat(BumpSliceMut::new(
alloc,
bumpalo::vec![in alloc; empty(), concat0()].into_bump_slice_mut(),
));
assert_eq!(concat.iter().count(), 0);
}
}
#[no_mangle]
pub unsafe extern "C" fn no_call_compile_only_USED_TYPES_instruction_sequence<'arena>(
_: InstrSeq<'arena>,
) {
unimplemented!()
}
contact.go | package service
import (
"errors"
"go_im/model"
)
type ContactService struct {
}
func (this *ContactService) AddFriend(userid, dstid int64) error {
	contact := model.Contact{}
if userid == dstid {
return errors.New("not add self for friend")
}
// Check whether the target user has already been added as a friend.
_, e := DbEngin.Where("ownerid=?", userid).And("dstobj=?", dstid).And("cate=?", model.CONCAT_CATE_USER).Get(&contact)
if e != nil {
return e
}
if contact.Id > 0 {
return errors.New("user account is add the friends")
}
	session := DbEngin.NewSession()
	defer session.Close()
	if err := session.Begin(); err != nil {
		return err
	}
// Insert the contact row for this user.
_, e1 := session.InsertOne(model.Contact{
Ownerid: userid,
Dstobj: dstid,
Cate: model.CONCAT_CATE_USER,
})
// Insert the mirror contact row for the other user.
_, e2 := session.InsertOne(model.Contact{
Ownerid: dstid,
Dstobj: userid,
Cate: model.CONCAT_CATE_USER,
})
if e1 == nil && e2 == nil {
session.Commit()
return nil
} else {
session.Rollback()
if e1 != nil {
return e1
} else {
return e2
}
}
}
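// Note: AddFriend above writes one Contact row per direction inside a single
// transaction, so a failure on either insert rolls both back and the
// friendship stays symmetric. Illustrative call (assumes DbEngin has been
// initialised elsewhere):
//
//	var svc ContactService
//	err := svc.AddFriend(1, 2) // befriends users 1 and 2 in both directions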
func (this *ContactService) SearchFriend(userid int64) ([]model.User, error) {
userids := make([]string, 0)
e := DbEngin.Table(new(model.Contact)).Where("ownerid=?", userid).And("cate=?", model.CONCAT_CATE_USER).Cols("dstobj").Find(&userids)
if e != nil {
return nil, e
}
	users := make([]model.User, 0)
	if len(userids) == 0 {
		return users, nil
	}
	e = DbEngin.In("id", userids).Find(&users)
if e != nil {
return nil, e
}
return users, nil
}
func (this *ContactService) SearchComunityIds(userId int64) ([]int64, error) {
conids := make([]int64, 0)
e := DbEngin.Table(new(model.Contact)).Where("ownerid =? and cate=?", userId, model.CONCAT_CATE_COMUNITY).Cols("dstobj").Find(&conids)
return conids, e
}
func (service *ContactService) SearchComunity(userId int64) []model.Community {
comIds := make([]int64, 0)
DbEngin.Table(new(model.Contact)).Where("ownerid = ? and cate = ?", userId, model.CONCAT_CATE_COMUNITY).Cols("dstobj").Find(&comIds)
if len(comIds) == 0 {
return nil
}
coms := make([]model.Community, 0)
DbEngin.In("id", comIds).Find(&coms)
return coms
}
// JoinCommunity adds the user to a community (group).
func (service *ContactService) JoinCommunity(userId, comId int64) error {
cot := model.Contact{
Ownerid: userId,
Dstobj: comId,
Cate: model.CONCAT_CATE_COMUNITY,
}
	has, err := DbEngin.Get(&cot)
	if err != nil {
		return err
	}
	if !has {
		_, err = DbEngin.InsertOne(cot)
		return err
	}
	return nil
}
func (this *ContactService) CreateGroup(userId int64, groupName string) (model.Community, error) {
group := model.Community{}
group.Ownerid = userId
group.Name = groupName
group.Cate = 2
group.Memo = ""
group.Icon = "/asset/images/avatar0.png"
_, e := DbEngin.InsertOne(&group)
return group, e
}
check_brackets.py | # python3
from collections import namedtuple
Bracket = namedtuple("Bracket", ["char", "position"])
def are_matching(left, right):
return (left + right) in ["()", "[]", "{}"]
def find_mismatch(text):
    opening_brackets_stack = []
    mismatch = []
    for i, ch in enumerate(text):
        # Remember each opening bracket together with its 1-based position.
        if ch in "([{":
            opening_brackets_stack.append(Bracket(ch, i + 1))
        # A closing bracket must match the most recently opened bracket.
        if ch in ")]}":
            if not opening_brackets_stack:
                mismatch.append(Bracket(ch, i + 1))
            elif are_matching(opening_brackets_stack[-1].char, ch):
                opening_brackets_stack.pop()
            else:
                mismatch.append(opening_brackets_stack.pop())
    # Any opening brackets still on the stack were never closed.
    if opening_brackets_stack:
        mismatch.extend(opening_brackets_stack)
    return mismatch
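# Example (illustrative): find_mismatch("[](()") leaves the '(' opened at
# position 3 unmatched, so main() would print 3 for that input.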
def main():
text = input()
mismatch = find_mismatch(text)
    # Print "Success" when every bracket matches; otherwise print the
    # positions of the unmatched brackets, one per line.
    if not mismatch:
        print("Success")
    else:
        for bracket in mismatch:
            print(bracket.position)
if __name__ == "__main__":
    main()
messages.rs | // Copyright 2020 - developers of the `grammers` project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Methods related to sending messages.
use crate::types::{IterBuffer, Message};
use crate::utils::{generate_random_id, generate_random_ids};
use crate::{types, ChatMap, Client};
pub use grammers_mtsender::{AuthorizationError, InvocationError};
use grammers_session::PackedChat;
use grammers_tl_types as tl;
use std::collections::HashMap;
fn map_random_ids_to_messages(
client: &Client,
random_ids: &[i64],
updates: tl::enums::Updates,
) -> Vec<Option<Message>> {
match updates {
tl::enums::Updates::Updates(tl::types::Updates {
updates,
users,
chats,
date: _,
seq: _,
}) => {
let chats = ChatMap::new(users, chats);
let rnd_to_id = updates
.iter()
.filter_map(|update| match update {
tl::enums::Update::MessageId(u) => Some((u.random_id, u.id)),
_ => None,
})
.collect::<HashMap<_, _>>();
// TODO ideally this would use the same UpdateIter mechanism to make sure we don't
// accidentally miss variants
let mut id_to_msg = updates
.into_iter()
.filter_map(|update| match update {
tl::enums::Update::NewMessage(tl::types::UpdateNewMessage {
message, ..
}) => Some(message),
tl::enums::Update::NewChannelMessage(tl::types::UpdateNewChannelMessage {
message,
..
}) => Some(message),
_ => None,
})
.filter_map(|message| Message::new(client, message, &chats))
.map(|message| (message.msg.id, message))
.collect::<HashMap<_, _>>();
random_ids
.iter()
.map(|rnd| rnd_to_id.get(rnd).and_then(|id| id_to_msg.remove(id)))
.collect()
}
_ => panic!("API returned something other than Updates so messages can't be mapped"),
}
}
const MAX_LIMIT: usize = 100;
impl<R: tl::RemoteCall<Return = tl::enums::messages::Messages>> IterBuffer<R, Message> {
/// Fetches the total unless cached.
///
/// The `request.limit` should be set to the right value before calling this method.
async fn get_total(&mut self) -> Result<usize, InvocationError> {
if let Some(total) = self.total {
return Ok(total);
}
use tl::enums::messages::Messages;
let total = match self.client.invoke(&self.request).await? {
Messages::Messages(messages) => messages.messages.len(),
Messages::Slice(messages) => messages.count as usize,
Messages::ChannelMessages(messages) => messages.count as usize,
Messages::NotModified(messages) => messages.count as usize,
};
self.total = Some(total);
Ok(total)
}
/// Performs the network call, fills the buffer, and returns the `offset_rate` if any.
///
/// The `request.limit` should be set to the right value before calling this method.
async fn fill_buffer(&mut self, limit: i32) -> Result<Option<i32>, InvocationError> {
use tl::enums::messages::Messages;
let (messages, users, chats, rate) = match self.client.invoke(&self.request).await? {
Messages::Messages(m) => {
self.last_chunk = true;
self.total = Some(m.messages.len());
(m.messages, m.users, m.chats, None)
}
Messages::Slice(m) => {
self.last_chunk = m.messages.len() < limit as usize;
self.total = Some(m.count as usize);
(m.messages, m.users, m.chats, m.next_rate)
}
Messages::ChannelMessages(m) => {
self.last_chunk = m.messages.len() < limit as usize;
self.total = Some(m.count as usize);
(m.messages, m.users, m.chats, None)
}
Messages::NotModified(_) => {
panic!("API returned Messages::NotModified even though hash = 0")
}
};
let chats = ChatMap::new(users, chats);
let client = self.client.clone();
self.buffer.extend(
messages
.into_iter()
.flat_map(|message| Message::new(&client, message, &chats)),
);
Ok(rate)
}
}
pub type MessageIter = IterBuffer<tl::functions::messages::GetHistory, Message>;
impl MessageIter {
fn new(client: &Client, peer: PackedChat) -> Self {
// TODO let users tweak all the options from the request
Self::from_request(
client,
MAX_LIMIT,
tl::functions::messages::GetHistory {
peer: peer.to_input_peer(),
offset_id: 0,
offset_date: 0,
add_offset: 0,
limit: 0,
max_id: 0,
min_id: 0,
hash: 0,
},
)
}
/// Determines how many messages there are in total.
///
/// This only performs a network call if `next` has not been called before.
pub async fn total(&mut self) -> Result<usize, InvocationError> {
self.request.limit = 1;
self.get_total().await
}
    /// Returns the next `Message` from the internal buffer, filling it first if it's
    /// empty.
///
/// Returns `None` if the `limit` is reached or there are no messages left.
pub async fn next(&mut self) -> Result<Option<Message>, InvocationError> {
if let Some(result) = self.next_raw() {
return result;
}
self.request.limit = self.determine_limit(MAX_LIMIT);
self.fill_buffer(self.request.limit).await?;
// Don't bother updating offsets if this is the last time stuff has to be fetched.
if !self.last_chunk && !self.buffer.is_empty() {
let last = &self.buffer[self.buffer.len() - 1];
self.request.offset_id = last.msg.id;
self.request.offset_date = last.msg.date;
}
Ok(self.pop_item())
}
}
pub type SearchIter = IterBuffer<tl::functions::messages::Search, Message>;
impl SearchIter {
fn new(client: &Client, peer: PackedChat) -> Self {
// TODO let users tweak all the options from the request
Self::from_request(
client,
MAX_LIMIT,
tl::functions::messages::Search {
peer: peer.to_input_peer(),
q: String::new(),
from_id: None,
top_msg_id: None,
filter: tl::enums::MessagesFilter::InputMessagesFilterEmpty,
min_date: 0,
max_date: 0,
offset_id: 0,
add_offset: 0,
limit: 0,
max_id: 0,
min_id: 0,
hash: 0,
},
)
}
/// Changes the query of the search. Telegram servers perform a somewhat fuzzy search over
    /// this query (so a word in singular may also return messages with the word in plural, for
/// example).
pub fn query(mut self, query: &str) -> Self {
self.request.q = query.to_string();
self
}
/// Changes the media filter. Only messages with this type of media will be fetched.
pub fn filter(mut self, filter: tl::enums::MessagesFilter) -> Self {
self.request.filter = filter;
self
}
/// Determines how many messages there are in total.
///
/// This only performs a network call if `next` has not been called before.
pub async fn total(&mut self) -> Result<usize, InvocationError> {
// Unlike most requests, a limit of 0 actually returns 0 and not a default amount
// (as of layer 120).
self.request.limit = 0;
self.get_total().await
}
    /// Returns the next `Message` from the internal buffer, filling it first if it's
    /// empty.
///
/// Returns `None` if the `limit` is reached or there are no messages left.
pub async fn next(&mut self) -> Result<Option<Message>, InvocationError> {
if let Some(result) = self.next_raw() {
return result;
}
self.request.limit = self.determine_limit(MAX_LIMIT);
self.fill_buffer(self.request.limit).await?;
// Don't bother updating offsets if this is the last time stuff has to be fetched.
if !self.last_chunk && !self.buffer.is_empty() {
let last = &self.buffer[self.buffer.len() - 1];
self.request.offset_id = last.msg.id;
self.request.max_date = last.msg.date;
}
Ok(self.pop_item())
}
}
pub type GlobalSearchIter = IterBuffer<tl::functions::messages::SearchGlobal, Message>;
impl GlobalSearchIter {
fn new(client: &Client) -> Self {
// TODO let users tweak all the options from the request
Self::from_request(
client,
MAX_LIMIT,
tl::functions::messages::SearchGlobal {
folder_id: None,
q: String::new(),
filter: tl::enums::MessagesFilter::InputMessagesFilterEmpty,
min_date: 0,
max_date: 0,
offset_rate: 0,
offset_peer: tl::enums::InputPeer::Empty,
offset_id: 0,
limit: 0,
},
)
}
/// Changes the query of the search. Telegram servers perform a somewhat fuzzy search over
    /// this query (so a word in singular may also return messages with the word in plural, for
/// example).
pub fn query(mut self, query: &str) -> Self {
self.request.q = query.to_string();
self
}
/// Changes the media filter. Only messages with this type of media will be fetched.
pub fn filter(mut self, filter: tl::enums::MessagesFilter) -> Self {
self.request.filter = filter;
self
}
/// Determines how many messages there are in total.
///
/// This only performs a network call if `next` has not been called before.
pub async fn total(&mut self) -> Result<usize, InvocationError> {
self.request.limit = 1;
self.get_total().await
}
    /// Returns the next `Message` from the internal buffer, filling it first if it's
    /// empty.
///
/// Returns `None` if the `limit` is reached or there are no messages left.
pub async fn next(&mut self) -> Result<Option<Message>, InvocationError> {
if let Some(result) = self.next_raw() {
return result;
}
self.request.limit = self.determine_limit(MAX_LIMIT);
let offset_rate = self.fill_buffer(self.request.limit).await?;
// Don't bother updating offsets if this is the last time stuff has to be fetched.
if !self.last_chunk && !self.buffer.is_empty() {
let last = &self.buffer[self.buffer.len() - 1];
self.request.offset_rate = offset_rate.unwrap_or(0);
self.request.offset_peer = last.chat().pack().to_input_peer();
self.request.offset_id = last.msg.id;
}
Ok(self.pop_item())
}
}
/// Method implementations related to sending, modifying or getting messages.
impl Client {
/// Sends a message to the desired chat.
///
/// This method can also be used to send media such as photos, videos, documents, polls, etc.
///
/// If you want to send a local file as media, you will need to use
/// [`Client::upload_file`] first.
///
/// Refer to [`InputMessage`] to learn more formatting options, such as using markdown or
/// adding buttons under your message (if you're logged in as a bot).
///
/// See also: [`Message::respond`], [`Message::reply`].
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client.send_message(&chat, "Boring text message :-(").await?;
///
/// use grammers_client::InputMessage;
///
/// client.send_message(&chat, InputMessage::text("Sneaky message").silent(true)).await?;
/// # Ok(())
/// # }
/// ```
///
/// [`InputMessage`]: crate::InputMessage
pub async fn send_message<C: Into<PackedChat>, M: Into<types::InputMessage>>(
&self,
chat: C,
message: M,
) -> Result<Message, InvocationError> {
let chat = chat.into();
let message = message.into();
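        // A client-chosen random ID identifies this message in the updates the
        // server sends back (see `map_random_ids_to_messages`) and lets the
        // server deduplicate retried sends.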
let random_id = generate_random_id();
let updates = if let Some(media) = message.media.clone() {
self.invoke(&tl::functions::messages::SendMedia {
silent: message.silent,
background: message.background,
clear_draft: message.clear_draft,
peer: chat.to_input_peer(),
reply_to_msg_id: message.reply_to,
media,
message: message.text.clone(),
random_id,
reply_markup: message.reply_markup.clone(),
entities: if message.entities.is_empty() {
None
} else {
Some(message.entities.clone())
},
schedule_date: message.schedule_date,
})
.await
} else {
self.invoke(&tl::functions::messages::SendMessage {
no_webpage: !message.link_preview,
silent: message.silent,
background: message.background,
clear_draft: message.clear_draft,
peer: chat.to_input_peer(),
reply_to_msg_id: message.reply_to,
message: message.text.clone(),
random_id,
reply_markup: message.reply_markup.clone(),
entities: if message.entities.is_empty() {
None
} else {
Some(message.entities.clone())
},
schedule_date: message.schedule_date,
})
.await
}?;
Ok(match updates {
tl::enums::Updates::UpdateShortSentMessage(updates) => {
Message::from_short_updates(self, updates, message, chat)
}
updates => map_random_ids_to_messages(self, &[random_id], updates)
.pop()
.unwrap()
.unwrap(),
})
}
/// Edits an existing message.
///
/// Similar to [`Client::send_message`], advanced formatting can be achieved with the
/// options offered by [`InputMessage`].
///
/// See also: [`Message::edit`].
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let old_message_id = 123;
/// client.edit_message(&chat, old_message_id, "New text message").await?;
/// # Ok(())
/// # }
/// ```
///
/// [`InputMessage`]: crate::InputMessage
// TODO don't require nasty InputPeer
pub async fn edit_message<C: Into<PackedChat>, M: Into<types::InputMessage>>(
&self,
chat: C,
message_id: i32,
new_message: M,
) -> Result<(), InvocationError> {
let new_message = new_message.into();
self.invoke(&tl::functions::messages::EditMessage {
no_webpage: !new_message.link_preview,
peer: chat.into().to_input_peer(),
id: message_id,
message: Some(new_message.text),
media: new_message.media,
reply_markup: new_message.reply_markup,
entities: Some(new_message.entities),
schedule_date: new_message.schedule_date,
})
.await?;
Ok(())
}
/// Deletes up to 100 messages in a chat.
///
/// <div class="stab unstable">
///
/// **Warning**: when deleting messages from small group chats or private conversations, this
/// method cannot validate that the provided message IDs actually belong to the input chat due
/// to the way Telegram's API works. Make sure to pass correct [`Message::id`]'s.
///
/// </div>
///
/// The messages are deleted for both ends.
///
/// The amount of deleted messages is returned (it might be less than the amount of input
/// message IDs if some of them were already missing). It is not possible to find out which
/// messages were actually deleted, but if the request succeeds, none of the specified message
/// IDs will appear in the message history from that point on.
///
/// See also: [`Message::delete`].
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let message_ids = [123, 456, 789];
///
/// // Careful, these messages will be gone after the method succeeds!
/// client.delete_messages(&chat, &message_ids).await?;
/// # Ok(())
/// # }
/// ```
pub async fn delete_messages<C: Into<PackedChat>>(
&self,
chat: C,
message_ids: &[i32],
) -> Result<usize, InvocationError> {
let tl::enums::messages::AffectedMessages::Messages(affected) =
if let Some(channel) = chat.into().try_to_input_channel() {
self.invoke(&tl::functions::channels::DeleteMessages {
channel,
id: message_ids.to_vec(),
})
.await
} else {
self.invoke(&tl::functions::messages::DeleteMessages {
revoke: true,
id: message_ids.to_vec(),
})
.await
}?;
Ok(affected.pts_count as usize)
}
/// Forwards up to 100 messages from `source` into `destination`.
///
/// For consistency with other methods, the chat upon which this request acts comes first
/// (destination), and then the source chat.
///
    /// Returns the newly forwarded messages in a list. Those messages that could not be forwarded
/// will be `None`. The length of the resulting list is the same as the length of the input
/// message IDs, and the indices from the list of IDs map to the indices in the result so
/// you can find which messages were forwarded and which message they became.
///
/// See also: [`Message::forward_to`].
///
/// # Examples
///
/// ```
/// # async fn f(destination: grammers_client::types::Chat, source: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let message_ids = [123, 456, 789];
///
/// let messages = client.forward_messages(&destination, &message_ids, &source).await?;
/// let fwd_count = messages.into_iter().filter(Option::is_some).count();
/// println!("Forwarded {} out of {} messages!", fwd_count, message_ids.len());
/// # Ok(())
/// # }
/// ```
pub async fn | <C: Into<PackedChat>, S: Into<PackedChat>>(
&self,
destination: C,
message_ids: &[i32],
source: S,
) -> Result<Vec<Option<Message>>, InvocationError> {
// TODO let user customize more options
let request = tl::functions::messages::ForwardMessages {
silent: false,
background: false,
with_my_score: false,
from_peer: source.into().to_input_peer(),
id: message_ids.to_vec(),
random_id: generate_random_ids(message_ids.len()),
to_peer: destination.into().to_input_peer(),
schedule_date: None,
};
let result = self.invoke(&request).await?;
Ok(map_random_ids_to_messages(self, &request.random_id, result))
}
    /// Gets the [`Message`] to which the input message is replying.
///
/// See also: [`Message::get_reply`].
///
/// # Examples
///
/// ```
/// # async fn f(message: grammers_client::types::Message, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// if let Some(reply) = client.get_reply_to_message(&message).await? {
/// println!("The reply said: {}", reply.text());
/// }
/// # Ok(())
/// # }
/// ```
pub async fn get_reply_to_message(
&self,
message: &Message,
) -> Result<Option<Message>, InvocationError> {
/// Helper method to fetch a single message by its input message.
async fn get_message(
client: &Client,
chat: PackedChat,
id: tl::enums::InputMessage,
) -> Result<(tl::enums::messages::Messages, bool), InvocationError> {
if let Some(channel) = chat.try_to_input_channel() {
client
.invoke(&tl::functions::channels::GetMessages {
id: vec![id],
channel,
})
.await
.map(|res| (res, false))
} else {
client
.invoke(&tl::functions::messages::GetMessages { id: vec![id] })
.await
.map(|res| (res, true))
}
}
// TODO shouldn't this method take in a message id anyway?
let chat = message.chat().pack();
let reply_to_message_id = match message.reply_to_message_id() {
Some(id) => id,
None => return Ok(None),
};
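        // First ask the server to resolve the reply directly via
        // `InputMessageReplyTo`; if that fails, fall back to fetching the
        // replied-to message by its plain ID.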
let input_id =
tl::enums::InputMessage::ReplyTo(tl::types::InputMessageReplyTo { id: message.msg.id });
let (res, filter_req) = match get_message(self, chat, input_id).await {
Ok(tup) => tup,
Err(_) => {
let input_id = tl::enums::InputMessage::Id(tl::types::InputMessageId {
id: reply_to_message_id,
});
get_message(self, chat, input_id).await?
}
};
use tl::enums::messages::Messages;
let (messages, users, chats) = match res {
Messages::Messages(m) => (m.messages, m.users, m.chats),
Messages::Slice(m) => (m.messages, m.users, m.chats),
Messages::ChannelMessages(m) => (m.messages, m.users, m.chats),
Messages::NotModified(_) => {
panic!("API returned Messages::NotModified even though GetMessages was used")
}
};
let chats = ChatMap::new(users, chats);
Ok(messages
.into_iter()
.flat_map(|m| Message::new(self, m, &chats))
.next()
.filter(|m| !filter_req || m.msg.peer_id == message.msg.peer_id))
}
/// Iterate over the message history of a chat, from most recent to oldest.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Note we're setting a reasonable limit, or we'd print out ALL the messages in chat!
/// let mut messages = client.iter_messages(&chat).limit(100);
///
/// while let Some(message) = messages.next().await? {
/// println!("{}", message.text());
/// }
/// # Ok(())
/// # }
/// ```
pub fn iter_messages<C: Into<PackedChat>>(&self, chat: C) -> MessageIter {
MessageIter::new(self, chat.into())
}
/// Iterate over the messages that match certain search criteria.
///
/// This allows you to search by text within a chat or filter by media among other things.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Let's print all the people who think grammers is cool.
/// let mut messages = client.search_messages(&chat).query("grammers is cool");
///
/// while let Some(message) = messages.next().await? {
/// println!("{}", message.sender().unwrap().name());
/// }
/// # Ok(())
/// # }
/// ```
pub fn search_messages<C: Into<PackedChat>>(&self, chat: C) -> SearchIter {
SearchIter::new(self, chat.into())
}
/// Iterate over the messages that match certain search criteria, without being restricted to
    /// searching in a specific chat. The downside is that this global search supports fewer filters.
///
/// This allows you to search by text within a chat or filter by media among other things.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// // Let's print all the chats where people think grammers is cool.
/// let mut messages = client.search_all_messages().query("grammers is cool");
///
/// while let Some(message) = messages.next().await? {
/// println!("{}", message.chat().name());
/// }
/// # Ok(())
/// # }
/// ```
pub fn search_all_messages(&self) -> GlobalSearchIter {
GlobalSearchIter::new(self)
}
/// Get up to 100 messages using their ID.
///
    /// Returns the retrieved messages in a list. Those messages that could not be retrieved
/// or do not belong to the input chat will be `None`. The length of the resulting list is the
/// same as the length of the input message IDs, and the indices from the list of IDs map to
/// the indices in the result so you can map them into the new list.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let message_ids = [123, 456, 789];
///
/// let messages = client.get_messages_by_id(&chat, &message_ids).await?;
/// let count = messages.into_iter().filter(Option::is_some).count();
/// println!("{} out of {} messages were deleted!", message_ids.len() - count, message_ids.len());
/// # Ok(())
/// # }
/// ```
pub async fn get_messages_by_id<C: Into<PackedChat>>(
&self,
chat: C,
message_ids: &[i32],
) -> Result<Vec<Option<Message>>, InvocationError> {
let chat = chat.into();
let id = message_ids
.iter()
.map(|&id| tl::enums::InputMessage::Id(tl::types::InputMessageId { id }))
.collect();
let result = if let Some(channel) = chat.try_to_input_channel() {
self.invoke(&tl::functions::channels::GetMessages { channel, id })
.await
} else {
self.invoke(&tl::functions::messages::GetMessages { id })
.await
}?;
let (messages, users, chats) = match result {
tl::enums::messages::Messages::Messages(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::Slice(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::ChannelMessages(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::NotModified(_) => {
panic!("API returned Messages::NotModified even though GetMessages was used")
}
};
let chats = ChatMap::new(users, chats);
let mut map = messages
.into_iter()
.flat_map(|m| Message::new(self, m, &chats))
.filter(|m| m.chat().pack() == chat)
.map(|m| (m.msg.id, m))
.collect::<HashMap<_, _>>();
Ok(message_ids.iter().map(|id| map.remove(id)).collect())
}
/// Get the latest pin from a chat.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// if let Some(message) = client.get_pinned_message(&chat).await? {
/// println!("There is a message pinned in {}: {}", chat.name(), message.text());
/// } else {
/// println!("There are no messages pinned in {}", chat.name());
/// }
/// # Ok(())
/// # }
/// ```
pub async fn get_pinned_message<C: Into<PackedChat>>(
&self,
chat: C,
) -> Result<Option<Message>, InvocationError> {
let chat = chat.into();
// TODO return types::Message and print its text in the example
let id = vec![tl::enums::InputMessage::Pinned];
let result = if let Some(channel) = chat.try_to_input_channel() {
self.invoke(&tl::functions::channels::GetMessages { channel, id })
.await
} else {
self.invoke(&tl::functions::messages::GetMessages { id })
.await
}?;
let (messages, users, chats) = match result {
tl::enums::messages::Messages::Messages(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::Slice(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::ChannelMessages(m) => (m.messages, m.users, m.chats),
tl::enums::messages::Messages::NotModified(_) => {
panic!("API returned Messages::NotModified even though GetMessages was used")
}
};
let chats = ChatMap::new(users, chats);
Ok(messages
.into_iter()
.flat_map(|m| Message::new(self, m, &chats))
.find(|m| m.chat().pack() == chat))
}
/// Pin a message in the chat. This will not notify any users.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let message_id = 123;
/// client.pin_message(&chat, message_id).await?;
/// # Ok(())
/// # }
/// ```
// TODO return produced Option<service message>
pub async fn pin_message<C: Into<PackedChat>>(
&self,
chat: C,
message_id: i32,
) -> Result<(), InvocationError> {
self.update_pinned(chat.into(), message_id, true).await
}
/// Unpin a message from the chat.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let message_id = 123;
/// client.unpin_message(&chat, message_id).await?;
/// # Ok(())
/// # }
/// ```
pub async fn unpin_message<C: Into<PackedChat>>(
&self,
chat: C,
message_id: i32,
) -> Result<(), InvocationError> {
self.update_pinned(chat.into(), message_id, false).await
}
async fn update_pinned(
&self,
chat: PackedChat,
id: i32,
pin: bool,
) -> Result<(), InvocationError> {
self.invoke(&tl::functions::messages::UpdatePinnedMessage {
silent: true,
unpin: !pin,
pm_oneside: false,
peer: chat.to_input_peer(),
id,
})
.await
.map(drop)
}
/// Unpin all currently-pinned messages from the chat.
///
/// # Examples
///
/// ```
/// # async fn f(chat: grammers_client::types::Chat, mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client.unpin_all_messages(&chat).await?;
/// # Ok(())
/// # }
/// ```
pub async fn unpin_all_messages<C: Into<PackedChat>>(
&self,
chat: C,
) -> Result<(), InvocationError> {
self.invoke(&tl::functions::messages::UnpinAllMessages {
peer: chat.into().to_input_peer(),
})
.await?;
Ok(())
}
}
| forward_messages |
GetAttachDevice2.go | package netgear_client
import (
"encoding/xml"
"fmt"
"html"
)
func (client *NetgearClient) GetAttachDevice2() ([]map[string]string, error) {
const ACTION = "urn:NETGEAR-ROUTER:service:DeviceInfo:1#GetAttachDevice2"
const REQUEST = `<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<SOAP-ENV:Envelope
xmlns:SOAPSDK1="http://www.w3.org/2001/XMLSchema"
xmlns:SOAPSDK2="http://www.w3.org/2001/XMLSchema-instance"
xmlns:SOAPSDK3="http://schemas.xmlsoap.org/soap/encoding/"
xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">
<SOAP-ENV:Header>
<SessionID>%s</SessionID>
</SOAP-ENV:Header>
<SOAP-ENV:Body>
    <M1:GetAttachDevice2 xsi:nil="true" />
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>`
response, err := client.send_request(ACTION, fmt.Sprintf(REQUEST, client.sessionid), true)
if err != nil {
return make([]map[string]string, 0), err
}
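	// `Node` is a generic XML element type assumed to be defined elsewhere in
	// this package; the SOAP response body is unmarshalled into it and walked
	// below to extract each attached device's fields.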
var inside Node
err = xml.Unmarshal(response, &inside)
if err != nil |
devices := make([]map[string]string, 0)
for _, node := range inside.Nodes[0].Nodes {
infoMap := make(map[string]string)
var deviceInfo Node
err = xml.Unmarshal([]byte(node.Content), &deviceInfo)
if err != nil {
return make([]map[string]string, 0), fmt.Errorf("Failed to unmarshal response from inside SOAP body: %v", err)
}
for _, infoValue := range node.Nodes {
infoMap[infoValue.XMLName.Local] = html.UnescapeString(infoValue.Content)
}
devices = append(devices, infoMap)
}
return devices, nil
}
| {
return make([]map[string]string, 0), fmt.Errorf("Failed to unmarshal response from inside SOAP body: %v", err)
} |
helping_chef.py | try:
t=int(input(''))
while t>0:
n=int(input(''))
if n<10:
print('What an obedient servant you are!')
else:
print('-1')
t=t-1 | except Exception as e:
pass | |
base.py | import abc
import enum
import itertools
import logging
import uuid
from copy import deepcopy
from typing import Any, Dict, List, MutableMapping, Optional, Union
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.compat import StringIO
import great_expectations.exceptions as ge_exceptions
from great_expectations.core.util import convert_to_json_serializable, nested_update
from great_expectations.marshmallow__shade import (
INCLUDE,
Schema,
ValidationError,
fields,
post_dump,
post_load,
pre_load,
validates_schema,
)
from great_expectations.marshmallow__shade.validate import OneOf
from great_expectations.types import DictDot, SerializableDictDot
from great_expectations.types.configurations import ClassConfigSchema
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
logger = logging.getLogger(__name__)
CURRENT_GE_CONFIG_VERSION = 3
FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE = 3
CURRENT_CHECKPOINT_CONFIG_VERSION = 1
MINIMUM_SUPPORTED_CONFIG_VERSION = 2
DEFAULT_USAGE_STATISTICS_URL = (
"https://stats.greatexpectations.io/great_expectations/v1/usage_statistics"
)
def object_to_yaml_str(obj):
output_str: str
with StringIO() as string_stream:
yaml.dump(obj, string_stream)
output_str = string_stream.getvalue()
return output_str
class BaseYamlConfig(SerializableDictDot):
_config_schema_class = None
def __init__(self, commented_map: CommentedMap = None):
if commented_map is None:
commented_map = CommentedMap()
self._commented_map = commented_map
@classmethod
def _get_schema_instance(cls) -> Schema:
if not issubclass(cls.get_schema_class(), Schema):
raise ge_exceptions.InvalidConfigError(
"Invalid type: A configuration schema class needs to inherit from the Marshmallow Schema class."
)
if not issubclass(cls.get_config_class(), BaseYamlConfig):
raise ge_exceptions.InvalidConfigError(
"Invalid type: A configuration class needs to inherit from the BaseYamlConfig class."
)
        if hasattr(cls.get_config_class(), "_schema_instance"):
            # noinspection PyProtectedMember
            schema_instance: Schema = cls.get_config_class()._schema_instance
            if schema_instance is None:
                cls.get_config_class()._schema_instance = (cls.get_schema_class())()
            return cls.get_config_class()._schema_instance
        else:
            # Cache the schema instance under the same attribute that is
            # checked above, so it is only constructed once per config class.
            cls.get_config_class()._schema_instance = (cls.get_schema_class())()
            return cls.get_config_class()._schema_instance
@classmethod
def from_commented_map(cls, commented_map: CommentedMap):
try:
config: Union[dict, BaseYamlConfig]
config = cls._get_schema_instance().load(commented_map)
if isinstance(config, dict):
return cls.get_config_class()(commented_map=commented_map, **config)
return config
except ValidationError:
logger.error(
"Encountered errors during loading config. See ValidationError for more details."
)
raise
def _get_schema_validated_updated_commented_map(self) -> CommentedMap:
commented_map: CommentedMap = deepcopy(self._commented_map)
commented_map.update(self._get_schema_instance().dump(self))
return commented_map
def to_yaml(self, outfile):
"""
:returns None (but writes a YAML file containing the project configuration)
"""
yaml.dump(self.commented_map, outfile)
def to_yaml_str(self) -> str:
"""
:returns a YAML string containing the project configuration
"""
return object_to_yaml_str(self.commented_map)
def to_json_dict(self) -> dict:
"""
        :returns a JSON-serializable dict containing the project configuration
"""
commented_map: CommentedMap = self.commented_map
return convert_to_json_serializable(data=commented_map)
@property
def commented_map(self) -> CommentedMap:
return self._get_schema_validated_updated_commented_map()
@classmethod
def get_config_class(cls):
raise NotImplementedError
@classmethod
def get_schema_class(cls):
raise NotImplementedError
class AssetConfig(DictDot):
def __init__(
self,
name=None,
class_name=None,
module_name=None,
bucket=None,
prefix=None,
delimiter=None,
max_keys=None,
batch_spec_passthrough=None,
**kwargs,
):
if name is not None:
self.name = name
self._class_name = class_name
self._module_name = module_name
if bucket is not None:
self.bucket = bucket
if prefix is not None:
self.prefix = prefix
if delimiter is not None:
self.delimiter = delimiter
if max_keys is not None:
self.max_keys = max_keys
if batch_spec_passthrough is not None:
self.batch_spec_passthrough = batch_spec_passthrough
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class AssetConfigSchema(Schema):
class Meta:
unknown = INCLUDE
name = fields.String(required=False, allow_none=True)
class_name = fields.String(required=False, allow_none=True, missing="Asset")
module_name = fields.String(
required=False,
        allow_none=True,
missing="great_expectations.datasource.data_connector.asset",
)
base_directory = fields.String(required=False, allow_none=True)
glob_directive = fields.String(required=False, allow_none=True)
pattern = fields.String(required=False, allow_none=True)
group_names = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
bucket = fields.String(required=False, allow_none=True)
prefix = fields.String(required=False, allow_none=True)
delimiter = fields.String(required=False, allow_none=True)
max_keys = fields.Integer(required=False, allow_none=True)
batch_spec_passthrough = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
pass
# noinspection PyUnusedLocal
@post_load
def make_asset_config(self, data, **kwargs):
return AssetConfig(**data)
class SorterConfig(DictDot):
def __init__(
self,
name,
class_name=None,
module_name=None,
orderby="asc",
reference_list=None,
datetime_format=None,
**kwargs,
):
self._name = name
self._class_name = class_name
self._module_name = module_name
self._orderby = orderby
for k, v in kwargs.items():
setattr(self, k, v)
if reference_list is not None:
self._reference_list = reference_list
if datetime_format is not None:
self._datetime_format = datetime_format
@property
def name(self):
return self._name
@property
def module_name(self):
return self._module_name
@property
def class_name(self):
|
@property
def orderby(self):
return self._orderby
@property
def reference_list(self):
return self._reference_list
@property
def datetime_format(self):
return self._datetime_format
class SorterConfigSchema(Schema):
class Meta:
unknown = INCLUDE
name = fields.String(required=True)
class_name = fields.String(required=True)
module_name = fields.String(
missing="great_expectations.datasource.data_connector.sorter"
)
orderby = fields.String(required=False, missing="asc", allow_none=False)
# allow_none = True because it is only used by some Sorters
reference_list = fields.List(
cls_or_instance=fields.Str(), required=False, missing=None, allow_none=True
)
datetime_format = fields.String(required=False, missing=None, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
pass
# noinspection PyUnusedLocal
@post_load
def make_sorter_config(self, data, **kwargs):
return SorterConfig(**data)
class DataConnectorConfig(DictDot):
def __init__(
self,
class_name,
module_name=None,
credentials=None,
assets=None,
base_directory=None,
glob_directive=None,
default_regex=None,
batch_identifiers=None,
sorters=None,
batch_spec_passthrough=None,
# S3
boto3_options=None,
bucket=None,
max_keys=None,
# Azure
azure_options=None,
container=None,
name_starts_with=None,
# GCS
bucket_or_name=None,
max_results=None,
# Both S3/GCS
prefix=None,
# Both S3/Azure
delimiter=None,
**kwargs,
):
self._class_name = class_name
self._module_name = module_name
if credentials is not None:
self.credentials = credentials
if assets is not None:
self.assets = assets
if base_directory is not None:
self.base_directory = base_directory
if glob_directive is not None:
self.glob_directive = glob_directive
if default_regex is not None:
self.default_regex = default_regex
if batch_identifiers is not None:
self.batch_identifiers = batch_identifiers
if sorters is not None:
self.sorters = sorters
if batch_spec_passthrough is not None:
self.batch_spec_passthrough = batch_spec_passthrough
# S3
if boto3_options is not None:
self.boto3_options = boto3_options
if bucket is not None:
self.bucket = bucket
if max_keys is not None:
self.max_keys = max_keys
# Azure
if azure_options is not None:
self.azure_options = azure_options
if container is not None:
self.container = container
if name_starts_with is not None:
self.name_starts_with = name_starts_with
# GCS
if bucket_or_name is not None:
self.bucket_or_name = bucket_or_name
if max_results is not None:
self.max_results = max_results
# Both S3/GCS
if prefix is not None:
self.prefix = prefix
# Both S3/Azure
if delimiter is not None:
self.delimiter = delimiter
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class DataConnectorConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(required=True)
module_name = fields.String(missing="great_expectations.datasource.data_connector")
assets = fields.Dict(
keys=fields.Str(),
values=fields.Nested(AssetConfigSchema, required=False, allow_none=True),
required=False,
allow_none=True,
)
base_directory = fields.String(required=False, allow_none=True)
glob_directive = fields.String(required=False, allow_none=True)
sorters = fields.List(
fields.Nested(SorterConfigSchema, required=False, allow_none=True),
required=False,
allow_none=True,
)
default_regex = fields.Dict(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
batch_identifiers = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
# S3
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
bucket = fields.String(required=False, allow_none=True)
max_keys = fields.Integer(required=False, allow_none=True)
# Azure
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
container = fields.String(required=False, allow_none=True)
name_starts_with = fields.String(required=False, allow_none=True)
# GCS
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
bucket_or_name = fields.String(required=False, allow_none=True)
max_results = fields.String(required=False, allow_none=True)
# Both S3/GCS
prefix = fields.String(required=False, allow_none=True)
# Both S3/Azure
delimiter = fields.String(required=False, allow_none=True)
data_asset_name_prefix = fields.String(required=False, allow_none=True)
data_asset_name_suffix = fields.String(required=False, allow_none=True)
include_schema_name = fields.Boolean(required=False, allow_none=True)
splitter_method = fields.String(required=False, allow_none=True)
splitter_kwargs = fields.Dict(required=False, allow_none=True)
sampling_method = fields.String(required=False, allow_none=True)
sampling_kwargs = fields.Dict(required=False, allow_none=True)
excluded_tables = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
included_tables = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
skip_inapplicable_tables = fields.Boolean(required=False, allow_none=True)
batch_spec_passthrough = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if ("default_regex" in data) and not (
data["class_name"]
in [
"InferredAssetFilesystemDataConnector",
"ConfiguredAssetFilesystemDataConnector",
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
subclass of the FilePathDataConnector class (your data connector is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if ("glob_directive" in data) and not (
data["class_name"]
in [
"InferredAssetFilesystemDataConnector",
"ConfiguredAssetFilesystemDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
filesystem type of the data connector (your data connector is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if ("delimiter" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3/Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if ("prefix" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3/GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if ("bucket" in data or "max_keys" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3 type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if (
"azure_options" in data or "container" in data or "name_starts_with" in data
) and not (
data["class_name"]
in [
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if "azure_options" in data and data["class_name"] in [
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]:
azure_options = data["azure_options"]
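            # Exactly one of `conn_str` or `account_url` may be given; the XOR
            # below rejects both-present and both-absent configurations.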
if not (("conn_str" in azure_options) ^ ("account_url" in azure_options)):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration is either missing methods of authentication or is using too many for the Azure type of data connector.
You must only select one between `conn_str` or `account_url`. Please update your configuration to continue.
"""
)
if (
"gcs_options" in data or "bucket_or_name" in data or "max_results" in data
) and not (
data["class_name"]
in [
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if "gcs_options" in data and data["class_name"] in [
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]:
gcs_options = data["gcs_options"]
if "filename" in gcs_options and "info" in gcs_options:
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration can only use a single method of authentication for the GCS type of data connector.
You must only select one between `filename` (from_service_account_file) and `info` (from_service_account_info). Please update your configuration to continue.
"""
)
if (
"data_asset_name_prefix" in data
or "data_asset_name_suffix" in data
or "include_schema_name" in data
or "splitter_method" in data
or "splitter_kwargs" in data
or "sampling_method" in data
or "sampling_kwargs" in data
or "excluded_tables" in data
or "included_tables" in data
or "skip_inapplicable_tables" in data
) and not (
data["class_name"]
in [
"InferredAssetSqlDataConnector",
"ConfiguredAssetSqlDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
SQL type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_data_connector_config(self, data, **kwargs):
return DataConnectorConfig(**data)
class ExecutionEngineConfig(DictDot):
def __init__(
self,
class_name,
module_name=None,
caching=None,
batch_spec_defaults=None,
connection_string=None,
credentials=None,
spark_config=None,
boto3_options=None,
azure_options=None,
gcs_options=None,
**kwargs,
):
self._class_name = class_name
self._module_name = module_name
if caching is not None:
self.caching = caching
if batch_spec_defaults is not None:
self._batch_spec_defaults = batch_spec_defaults
if connection_string is not None:
self.connection_string = connection_string
if credentials is not None:
self.credentials = credentials
if spark_config is not None:
self.spark_config = spark_config
if boto3_options is not None:
self.boto3_options = boto3_options
if azure_options is not None:
self.azure_options = azure_options
if gcs_options is not None:
self.gcs_options = gcs_options
for k, v in kwargs.items():
setattr(self, k, v)
@property
def module_name(self):
return self._module_name
@property
def class_name(self):
return self._class_name
@property
def batch_spec_defaults(self):
return self._batch_spec_defaults
class ExecutionEngineConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(required=True)
module_name = fields.String(missing="great_expectations.execution_engine")
connection_string = fields.String(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
spark_config = fields.Raw(required=False, allow_none=True)
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
caching = fields.Boolean(required=False, allow_none=True)
batch_spec_defaults = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if ("connection_string" in data or "credentials" in data) and not (
data["class_name"] == "SqlAlchemyExecutionEngine"
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses the "connection_string" key in an execution engine, but only
SqlAlchemyExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if "spark_config" in data and not (
data["class_name"] == "SparkDFExecutionEngine"
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses the "spark_config" key in an execution engine, but only
SparkDFExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_execution_engine_config(self, data, **kwargs):
return ExecutionEngineConfig(**data)
class DatasourceConfig(DictDot):
def __init__(
self,
class_name=None,
module_name: Optional[str] = "great_expectations.datasource",
execution_engine=None,
data_connectors=None,
data_asset_type=None,
batch_kwargs_generators=None,
connection_string=None,
credentials=None,
introspection=None,
tables=None,
boto3_options=None,
azure_options=None,
gcs_options=None,
reader_method=None,
reader_options=None,
limit=None,
**kwargs,
):
# NOTE - JPC - 20200316: Currently, we are mostly inconsistent with respect to this type...
self._class_name = class_name
self._module_name = module_name
if execution_engine is not None:
self.execution_engine = execution_engine
if data_connectors is not None and isinstance(data_connectors, dict):
self.data_connectors = data_connectors
# NOTE - AJB - 20201202: This should use the datasource class build_configuration method as in DataContext.add_datasource()
if data_asset_type is None:
if class_name == "PandasDatasource":
data_asset_type = {
"class_name": "PandasDataset",
"module_name": "great_expectations.dataset",
}
elif class_name == "SqlAlchemyDatasource":
data_asset_type = {
"class_name": "SqlAlchemyDataset",
"module_name": "great_expectations.dataset",
}
elif class_name == "SparkDFDatasource":
data_asset_type = {
"class_name": "SparkDFDataset",
"module_name": "great_expectations.dataset",
}
if data_asset_type is not None:
self.data_asset_type = data_asset_type
if batch_kwargs_generators is not None:
self.batch_kwargs_generators = batch_kwargs_generators
if connection_string is not None:
self.connection_string = connection_string
if credentials is not None:
self.credentials = credentials
if introspection is not None:
self.introspection = introspection
if tables is not None:
self.tables = tables
if boto3_options is not None:
self.boto3_options = boto3_options
if azure_options is not None:
self.azure_options = azure_options
if gcs_options is not None:
self.gcs_options = gcs_options
if reader_method is not None:
self.reader_method = reader_method
if reader_options is not None:
self.reader_options = reader_options
if limit is not None:
self.limit = limit
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class DatasourceConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(missing="Datasource")
module_name = fields.String(missing="great_expectations.datasource")
force_reuse_spark_context = fields.Bool(required=False, allow_none=True)
spark_config = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
execution_engine = fields.Nested(
ExecutionEngineConfigSchema, required=False, allow_none=True
)
data_connectors = fields.Dict(
keys=fields.Str(),
values=fields.Nested(DataConnectorConfigSchema),
required=False,
allow_none=True,
)
data_asset_type = fields.Nested(ClassConfigSchema, required=False, allow_none=True)
# TODO: Update to generator-specific
# batch_kwargs_generators = fields.Mapping(keys=fields.Str(), values=fields.Nested(fields.GeneratorSchema))
batch_kwargs_generators = fields.Dict(
keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True
)
connection_string = fields.String(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
introspection = fields.Dict(required=False, allow_none=True)
tables = fields.Dict(required=False, allow_none=True)
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
reader_method = fields.String(required=False, allow_none=True)
reader_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
limit = fields.Integer(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
if "generators" in data:
raise ge_exceptions.InvalidConfigError(
'Your current configuration uses the "generators" key in a datasource, but in version 0.10 of '
'GE that key is renamed to "batch_kwargs_generators". Please update your configuration to continue.'
)
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if (
"connection_string" in data
or "credentials" in data
or "introspection" in data
or "tables" in data
) and not (
data["class_name"]
in [
"SqlAlchemyDatasource",
"SimpleSqlalchemyDatasource",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data source that are required only by a
sqlalchemy data source (your data source is "{data['class_name']}"). Please update your configuration to continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_datasource_config(self, data, **kwargs):
return DatasourceConfig(**data)
class AnonymizedUsageStatisticsConfig(DictDot):
def __init__(self, enabled=True, data_context_id=None, usage_statistics_url=None):
self._enabled = enabled
if data_context_id is None:
data_context_id = str(uuid.uuid4())
self._explicit_id = False
else:
self._explicit_id = True
self._data_context_id = data_context_id
if usage_statistics_url is None:
usage_statistics_url = DEFAULT_USAGE_STATISTICS_URL
self._explicit_url = False
else:
self._explicit_url = True
self._usage_statistics_url = usage_statistics_url
@property
def enabled(self):
return self._enabled
@enabled.setter
def enabled(self, enabled):
if not isinstance(enabled, bool):
raise ValueError("usage statistics enabled property must be boolean")
self._enabled = enabled
@property
def data_context_id(self):
return self._data_context_id
@data_context_id.setter
def data_context_id(self, data_context_id):
try:
uuid.UUID(data_context_id)
except ValueError:
raise ge_exceptions.InvalidConfigError(
"data_context_id must be a valid uuid"
)
self._data_context_id = data_context_id
self._explicit_id = True
@property
def explicit_id(self):
return self._explicit_id
@property
def usage_statistics_url(self):
return self._usage_statistics_url
@usage_statistics_url.setter
def usage_statistics_url(self, usage_statistics_url):
self._usage_statistics_url = usage_statistics_url
self._explicit_url = True
class AnonymizedUsageStatisticsConfigSchema(Schema):
data_context_id = fields.UUID()
enabled = fields.Boolean(default=True)
usage_statistics_url = fields.URL(allow_none=True)
_explicit_url = fields.Boolean(required=False)
# noinspection PyUnusedLocal
@post_load()
def make_usage_statistics_config(self, data, **kwargs):
if "data_context_id" in data:
data["data_context_id"] = str(data["data_context_id"])
return AnonymizedUsageStatisticsConfig(**data)
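    # Serialization drops the usage-statistics URL unless the user set it
    # explicitly, so the implicit default never leaks into saved configs.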
# noinspection PyUnusedLocal
@post_dump()
def filter_implicit(self, data, **kwargs):
if not data.get("_explicit_url") and "usage_statistics_url" in data:
del data["usage_statistics_url"]
if "_explicit_url" in data:
del data["_explicit_url"]
return data
class NotebookTemplateConfig(DictDot):
def __init__(self, file_name, template_kwargs=None):
self.file_name = file_name
if template_kwargs:
self.template_kwargs = template_kwargs
else:
self.template_kwargs = {}
class NotebookTemplateConfigSchema(Schema):
file_name = fields.String()
template_kwargs = fields.Dict(
keys=fields.Str(), values=fields.Str(), allow_none=True
)
# noinspection PyUnusedLocal
@post_load
def make_notebook_template_config(self, data, **kwargs):
return NotebookTemplateConfig(**data)
class NotebookConfig(DictDot):
def __init__(
self,
class_name,
module_name,
custom_templates_module,
header_markdown=None,
footer_markdown=None,
table_expectations_header_markdown=None,
column_expectations_header_markdown=None,
table_expectations_not_found_markdown=None,
column_expectations_not_found_markdown=None,
authoring_intro_markdown=None,
column_expectations_markdown=None,
header_code=None,
footer_code=None,
table_expectation_code=None,
column_expectation_code=None,
):
self.class_name = class_name
self.module_name = module_name
self.custom_templates_module = custom_templates_module
self.header_markdown = header_markdown
self.footer_markdown = footer_markdown
self.table_expectations_header_markdown = table_expectations_header_markdown
self.column_expectations_header_markdown = column_expectations_header_markdown
self.table_expectations_not_found_markdown = (
table_expectations_not_found_markdown
)
self.column_expectations_not_found_markdown = (
column_expectations_not_found_markdown
)
self.authoring_intro_markdown = authoring_intro_markdown
self.column_expectations_markdown = column_expectations_markdown
self.header_code = header_code
self.footer_code = footer_code
self.table_expectation_code = table_expectation_code
self.column_expectation_code = column_expectation_code
class NotebookConfigSchema(Schema):
class_name = fields.String(missing="SuiteEditNotebookRenderer")
module_name = fields.String(
missing="great_expectations.render.renderer.suite_edit_notebook_renderer"
)
custom_templates_module = fields.String()
header_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
footer_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
table_expectations_header_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_header_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
table_expectations_not_found_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_not_found_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
authoring_intro_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
header_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
footer_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
table_expectation_code = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectation_code = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
# noinspection PyUnusedLocal
@post_load
def make_notebook_config(self, data, **kwargs):
return NotebookConfig(**data)
class NotebooksConfig(DictDot):
def __init__(self, suite_edit):
self.suite_edit = suite_edit
class NotebooksConfigSchema(Schema):
# for now only suite_edit, could have other customization options for
# notebooks in the future
suite_edit = fields.Nested(NotebookConfigSchema)
# noinspection PyUnusedLocal
@post_load
def make_notebooks_config(self, data, **kwargs):
return NotebooksConfig(**data)
class ConcurrencyConfig(DictDot):
"""WARNING: This class is experimental."""
def __init__(self, enabled: Optional[bool] = False):
"""Initialize a concurrency configuration to control multithreaded execution.
Args:
enabled: Whether or not multithreading is enabled.
"""
self._enabled = enabled
@property
def enabled(self):
"""Whether or not multithreading is enabled."""
return self._enabled
@property
def max_database_query_concurrency(self) -> int:
"""Max number of concurrent database queries to execute with mulithreading."""
# BigQuery has a limit of 100 for "Concurrent rate limit for interactive queries" as described at
# https://cloud.google.com/bigquery/quotas#query_jobs). If necessary, this can later be tuned for other
# databases and/or be manually user configurable.
return 100
def add_sqlalchemy_create_engine_parameters(
self, parameters: MutableMapping[str, Any]
):
"""Update SqlAlchemy parameters to prevent concurrency errors (e.g. http://sqlalche.me/e/14/3o7r) and
bottlenecks.
Args:
parameters: SqlAlchemy create_engine parameters to which we add concurrency appropriate parameters. If the
concurrency parameters are already set, those parameters are left unchanged.
"""
if not self._enabled:
return
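        # A "pool_size" of 0 and "max_overflow" of -1 remove the connection-pool
        # limits, so concurrent threads are not serialized while waiting for a
        # free connection.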
if "pool_size" not in parameters:
# https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.pool_size
parameters["pool_size"] = 0
if "max_overflow" not in parameters:
# https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.max_overflow
parameters["max_overflow"] = -1
class ConcurrencyConfigSchema(Schema):
"""WARNING: This class is experimental."""
enabled = fields.Boolean(default=False)
class GeCloudConfig(DictDot):
def __init__(self, base_url: str, account_id: str, access_token: str):
self.base_url = base_url
self.account_id = account_id
self.access_token = access_token
def to_json_dict(self):
return {
"base_url": self.base_url,
"account_id": self.account_id,
"access_token": self.access_token,
}
class DataContextConfigSchema(Schema):
config_version = fields.Number(
validate=lambda x: 0 < x < 100,
error_messages={"invalid": "config version must " "be a number."},
)
datasources = fields.Dict(
keys=fields.Str(),
values=fields.Nested(DatasourceConfigSchema),
required=False,
allow_none=True,
)
expectations_store_name = fields.Str()
validations_store_name = fields.Str()
evaluation_parameter_store_name = fields.Str()
checkpoint_store_name = fields.Str(required=False, allow_none=True)
plugins_directory = fields.Str(allow_none=True)
validation_operators = fields.Dict(
keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True
)
stores = fields.Dict(keys=fields.Str(), values=fields.Dict())
notebooks = fields.Nested(NotebooksConfigSchema, allow_none=True)
data_docs_sites = fields.Dict(
keys=fields.Str(), values=fields.Dict(), allow_none=True
)
config_variables_file_path = fields.Str(allow_none=True)
anonymous_usage_statistics = fields.Nested(AnonymizedUsageStatisticsConfigSchema)
concurrency = fields.Nested(ConcurrencyConfigSchema)
# noinspection PyMethodMayBeStatic
# noinspection PyUnusedLocal
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
if (
exc
and exc.messages
and isinstance(exc.messages, dict)
and all([key is None for key in exc.messages.keys()])
):
exc.messages = list(itertools.chain.from_iterable(exc.messages.values()))
message: str = (
f"Error while processing DataContextConfig: {' '.join(exc.messages)}"
)
logger.error(message)
raise ge_exceptions.InvalidDataContextConfigError(
message=message,
)
@validates_schema
def validate_schema(self, data, **kwargs):
if "config_version" not in data:
raise ge_exceptions.InvalidDataContextConfigError(
"The key `config_version` is missing; please check your config file.",
validation_error=ValidationError(message="no config_version key"),
)
if not isinstance(data["config_version"], (int, float)):
raise ge_exceptions.InvalidDataContextConfigError(
"The key `config_version` must be a number. Please check your config file.",
validation_error=ValidationError(message="config version not a number"),
)
# When migrating from 0.7.x to 0.8.0
if data["config_version"] == 0 and any(
[
store_config["class_name"] == "ValidationsStore"
for store_config in data["stores"].values()
]
):
raise ge_exceptions.UnsupportedConfigVersionError(
"You appear to be using a config version from the 0.7.x series. This version is no longer supported."
)
if data["config_version"] < MINIMUM_SUPPORTED_CONFIG_VERSION:
raise ge_exceptions.UnsupportedConfigVersionError(
"You appear to have an invalid config version ({}).\n The version number must be at least {}. "
"Please see the migration guide at https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html".format(
data["config_version"], MINIMUM_SUPPORTED_CONFIG_VERSION
),
)
if data["config_version"] > CURRENT_GE_CONFIG_VERSION:
raise ge_exceptions.InvalidDataContextConfigError(
"You appear to have an invalid config version ({}).\n The maximum valid version is {}.".format(
data["config_version"], CURRENT_GE_CONFIG_VERSION
),
validation_error=ValidationError(message="config version too high"),
)
if data["config_version"] < CURRENT_GE_CONFIG_VERSION and (
"checkpoint_store_name" in data
or any(
[
store_config["class_name"] == "CheckpointStore"
for store_config in data["stores"].values()
]
)
):
raise ge_exceptions.InvalidDataContextConfigError(
"You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format(
data["config_version"], float(CURRENT_GE_CONFIG_VERSION)
),
validation_error=ValidationError(
message="You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format(
data["config_version"], float(CURRENT_GE_CONFIG_VERSION)
)
),
)
if (
data["config_version"] >= FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE
and "validation_operators" in data
and data["validation_operators"] is not None
):
# TODO: <Alex>Add a URL to the migration guide with instructions for how to replace validation_operators with appropriate actions.</Alex>
logger.warning(
"You appear to be using a legacy capability with the latest config version ({}).\n Your data context with this configuration version uses validation_operators, which are being deprecated. Please update your configuration to be compatible with the version number {}.".format(
data["config_version"], CURRENT_GE_CONFIG_VERSION
),
)
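# A rough sketch of how the guards above behave (illustrative values; the real
# version constants are defined elsewhere in this module):
#
#     schema = DataContextConfigSchema()
#     schema.validate_schema({"config_version": 2.0, "stores": {}})  # passes the gates
#     schema.validate_schema({"stores": {}})  # raises InvalidDataContextConfigError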
class DataContextConfigDefaults(enum.Enum):
DEFAULT_CONFIG_VERSION = CURRENT_GE_CONFIG_VERSION
DEFAULT_EXPECTATIONS_STORE_NAME = "expectations_store"
EXPECTATIONS_BASE_DIRECTORY = "expectations"
DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"{EXPECTATIONS_BASE_DIRECTORY}/"
)
DEFAULT_VALIDATIONS_STORE_NAME = "validations_store"
VALIDATIONS_BASE_DIRECTORY = "validations"
DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"uncommitted/{VALIDATIONS_BASE_DIRECTORY}/"
)
DEFAULT_EVALUATION_PARAMETER_STORE_NAME = "evaluation_parameter_store"
DEFAULT_EVALUATION_PARAMETER_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
"evaluation_parameters/"
)
DEFAULT_CHECKPOINT_STORE_NAME = "checkpoint_store"
CHECKPOINTS_BASE_DIRECTORY = "checkpoints"
DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"{CHECKPOINTS_BASE_DIRECTORY}/"
)
DEFAULT_DATA_DOCS_SITE_NAME = "local_site"
DEFAULT_CONFIG_VARIABLES_FILEPATH = "uncommitted/config_variables.yml"
PLUGINS_BASE_DIRECTORY = "plugins"
DEFAULT_PLUGINS_DIRECTORY = f"{PLUGINS_BASE_DIRECTORY}/"
NOTEBOOKS_BASE_DIRECTORY = "notebooks"
DEFAULT_VALIDATION_OPERATORS = {
"action_list_operator": {
"class_name": "ActionListValidationOperator",
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {"class_name": "UpdateDataDocsAction"},
},
],
}
}
DEFAULT_STORES = {
DEFAULT_EXPECTATIONS_STORE_NAME: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
DEFAULT_VALIDATIONS_STORE_NAME: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
DEFAULT_EVALUATION_PARAMETER_STORE_NAME: {
"class_name": "EvaluationParameterStore"
},
DEFAULT_CHECKPOINT_STORE_NAME: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"suppress_store_backend_id": True,
"base_directory": DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
}
DEFAULT_DATA_DOCS_SITES = {
DEFAULT_DATA_DOCS_SITE_NAME: {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": "uncommitted/data_docs/local_site/",
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
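# Because these defaults live on an Enum, callers read them through ``.value``;
# mutable defaults are copied before use, as the classes below do:
#
#     DataContextConfigDefaults.DEFAULT_EXPECTATIONS_STORE_NAME.value  # "expectations_store"
#     stores = deepcopy(DataContextConfigDefaults.DEFAULT_STORES.value)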
class CheckpointConfigDefaults(enum.Enum):
DEFAULT_CONFIG_VERSION = CURRENT_CHECKPOINT_CONFIG_VERSION
class BaseStoreBackendDefaults(DictDot):
"""
Define base defaults for platform specific StoreBackendDefaults.
StoreBackendDefaults define defaults for specific cases of often used configurations.
    For example, if you plan to store expectations, validations, and data_docs in S3, use the S3StoreBackendDefaults and you may be able to specify fewer parameters.
"""
def __init__(
self,
expectations_store_name: str = DataContextConfigDefaults.DEFAULT_EXPECTATIONS_STORE_NAME.value,
validations_store_name: str = DataContextConfigDefaults.DEFAULT_VALIDATIONS_STORE_NAME.value,
evaluation_parameter_store_name: str = DataContextConfigDefaults.DEFAULT_EVALUATION_PARAMETER_STORE_NAME.value,
checkpoint_store_name: str = DataContextConfigDefaults.DEFAULT_CHECKPOINT_STORE_NAME.value,
data_docs_site_name: str = DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITE_NAME.value,
validation_operators: dict = None,
stores: dict = None,
data_docs_sites: dict = None,
):
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.validation_operators = validation_operators
if stores is None:
stores = deepcopy(DataContextConfigDefaults.DEFAULT_STORES.value)
self.stores = stores
if data_docs_sites is None:
data_docs_sites = deepcopy(
DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITES.value
)
self.data_docs_sites = data_docs_sites
self.data_docs_site_name = data_docs_site_name
class S3StoreBackendDefaults(BaseStoreBackendDefaults):
"""
    Default store configs for S3 backends, with some accessible parameters
Args:
default_bucket_name: Use this bucket name for stores that do not have a bucket name provided
expectations_store_bucket_name: Overrides default_bucket_name if supplied
validations_store_bucket_name: Overrides default_bucket_name if supplied
data_docs_bucket_name: Overrides default_bucket_name if supplied
checkpoint_store_bucket_name: Overrides default_bucket_name if supplied
expectations_store_prefix: Overrides default if supplied
validations_store_prefix: Overrides default if supplied
data_docs_prefix: Overrides default if supplied
checkpoint_store_prefix: Overrides default if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_bucket_name: Optional[str] = None,
expectations_store_bucket_name: Optional[str] = None,
validations_store_bucket_name: Optional[str] = None,
data_docs_bucket_name: Optional[str] = None,
checkpoint_store_bucket_name: Optional[str] = None,
expectations_store_prefix: str = "expectations",
validations_store_prefix: str = "validations",
data_docs_prefix: str = "data_docs",
checkpoint_store_prefix: str = "checkpoints",
expectations_store_name: str = "expectations_S3_store",
validations_store_name: str = "validations_S3_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_S3_store",
):
# Initialize base defaults
super().__init__()
# Use default_bucket_name if separate store buckets are not provided
if expectations_store_bucket_name is None:
expectations_store_bucket_name = default_bucket_name
if validations_store_bucket_name is None:
validations_store_bucket_name = default_bucket_name
if data_docs_bucket_name is None:
data_docs_bucket_name = default_bucket_name
if checkpoint_store_bucket_name is None:
checkpoint_store_bucket_name = default_bucket_name
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": expectations_store_bucket_name,
"prefix": expectations_store_prefix,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": validations_store_bucket_name,
"prefix": validations_store_prefix,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": checkpoint_store_bucket_name,
"prefix": checkpoint_store_prefix,
},
},
}
self.data_docs_sites = {
"s3_site": {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": data_docs_bucket_name,
"prefix": data_docs_prefix,
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
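# A minimal sketch of the bucket fallback above: one default bucket covers
# every store unless a store-specific bucket is supplied.
#
#     defaults = S3StoreBackendDefaults(
#         default_bucket_name="my-ge-bucket",
#         data_docs_bucket_name="my-ge-docs-bucket",
#     )
#     defaults.stores["expectations_S3_store"]["store_backend"]["bucket"]  # "my-ge-bucket"
#     defaults.data_docs_sites["s3_site"]["store_backend"]["bucket"]  # "my-ge-docs-bucket"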
class FilesystemStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for filesystem backends, with some accessible parameters
Args:
root_directory: Absolute directory prepended to the base_directory for each store
plugins_directory: Overrides default if supplied
"""
def __init__(
self,
root_directory: Optional[str] = None,
plugins_directory: Optional[str] = None,
):
# Initialize base defaults
super().__init__()
if plugins_directory is None:
plugins_directory = (
DataContextConfigDefaults.DEFAULT_PLUGINS_DIRECTORY.value
)
self.plugins_directory = plugins_directory
if root_directory is not None:
self.stores[self.expectations_store_name]["store_backend"][
"root_directory"
] = root_directory
self.stores[self.validations_store_name]["store_backend"][
"root_directory"
] = root_directory
self.stores[self.checkpoint_store_name]["store_backend"][
"root_directory"
] = root_directory
self.data_docs_sites[self.data_docs_site_name]["store_backend"][
"root_directory"
] = root_directory
class InMemoryStoreBackendDefaults(BaseStoreBackendDefaults):
"""
    Default store configs for in-memory backends.
This is useful for testing without persistence.
"""
def __init__(
self,
):
# Initialize base defaults
super().__init__()
self.stores = {
self.expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
self.validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
self.evaluation_parameter_store_name: {
"class_name": "EvaluationParameterStore"
},
self.checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
}
self.data_docs_sites = {}
class GCSStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for Google Cloud Storage (GCS) backends, with some accessible parameters
Args:
default_bucket_name: Use this bucket name for stores that do not have a bucket name provided
default_project_name: Use this project name for stores that do not have a project name provided
expectations_store_bucket_name: Overrides default_bucket_name if supplied
validations_store_bucket_name: Overrides default_bucket_name if supplied
data_docs_bucket_name: Overrides default_bucket_name if supplied
checkpoint_store_bucket_name: Overrides default_bucket_name if supplied
expectations_store_project_name: Overrides default_project_name if supplied
validations_store_project_name: Overrides default_project_name if supplied
data_docs_project_name: Overrides default_project_name if supplied
checkpoint_store_project_name: Overrides default_project_name if supplied
expectations_store_prefix: Overrides default if supplied
validations_store_prefix: Overrides default if supplied
data_docs_prefix: Overrides default if supplied
checkpoint_store_prefix: Overrides default if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_bucket_name: Optional[str] = None,
default_project_name: Optional[str] = None,
expectations_store_bucket_name: Optional[str] = None,
validations_store_bucket_name: Optional[str] = None,
data_docs_bucket_name: Optional[str] = None,
checkpoint_store_bucket_name: Optional[str] = None,
expectations_store_project_name: Optional[str] = None,
validations_store_project_name: Optional[str] = None,
data_docs_project_name: Optional[str] = None,
checkpoint_store_project_name: Optional[str] = None,
expectations_store_prefix: str = "expectations",
validations_store_prefix: str = "validations",
data_docs_prefix: str = "data_docs",
checkpoint_store_prefix: str = "checkpoints",
expectations_store_name: str = "expectations_GCS_store",
validations_store_name: str = "validations_GCS_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_GCS_store",
):
# Initialize base defaults
super().__init__()
# Use default_bucket_name if separate store buckets are not provided
if expectations_store_bucket_name is None:
expectations_store_bucket_name = default_bucket_name
if validations_store_bucket_name is None:
validations_store_bucket_name = default_bucket_name
if data_docs_bucket_name is None:
data_docs_bucket_name = default_bucket_name
if checkpoint_store_bucket_name is None:
checkpoint_store_bucket_name = default_bucket_name
# Use default_project_name if separate store projects are not provided
if expectations_store_project_name is None:
expectations_store_project_name = default_project_name
if validations_store_project_name is None:
validations_store_project_name = default_project_name
if data_docs_project_name is None:
data_docs_project_name = default_project_name
if checkpoint_store_project_name is None:
checkpoint_store_project_name = default_project_name
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": expectations_store_project_name,
"bucket": expectations_store_bucket_name,
"prefix": expectations_store_prefix,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": validations_store_project_name,
"bucket": validations_store_bucket_name,
"prefix": validations_store_prefix,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": checkpoint_store_project_name,
"bucket": checkpoint_store_bucket_name,
"prefix": checkpoint_store_prefix,
},
},
}
self.data_docs_sites = {
"gcs_site": {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": data_docs_project_name,
"bucket": data_docs_bucket_name,
"prefix": data_docs_prefix,
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
class DatabaseStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for database backends, with some accessible parameters
Args:
default_credentials: Use these credentials for all stores that do not have credentials provided
expectations_store_credentials: Overrides default_credentials if supplied
validations_store_credentials: Overrides default_credentials if supplied
checkpoint_store_credentials: Overrides default_credentials if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_credentials: Optional[Dict] = None,
expectations_store_credentials: Optional[Dict] = None,
validations_store_credentials: Optional[Dict] = None,
checkpoint_store_credentials: Optional[Dict] = None,
expectations_store_name: str = "expectations_database_store",
validations_store_name: str = "validations_database_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_database_store",
):
# Initialize base defaults
super().__init__()
        # Use default_credentials if separate credentials are not supplied for the expectations, validations, and checkpoint stores
if expectations_store_credentials is None:
expectations_store_credentials = default_credentials
if validations_store_credentials is None:
validations_store_credentials = default_credentials
if checkpoint_store_credentials is None:
checkpoint_store_credentials = default_credentials
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": expectations_store_credentials,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": validations_store_credentials,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": checkpoint_store_credentials,
},
},
}
class DataContextConfig(BaseYamlConfig):
# TODO: <Alex>ALEX (does not work yet)</Alex>
# _config_schema_class = DataContextConfigSchema
def __init__(
self,
config_version: Optional[float] = None,
datasources: Optional[
Union[
Dict[str, DatasourceConfig],
Dict[str, Dict[str, Union[Dict[str, str], str, dict]]],
]
] = None,
expectations_store_name: Optional[str] = None,
validations_store_name: Optional[str] = None,
evaluation_parameter_store_name: Optional[str] = None,
checkpoint_store_name: Optional[str] = None,
plugins_directory: Optional[str] = None,
validation_operators=None,
stores: Optional[Dict] = None,
data_docs_sites: Optional[Dict] = None,
notebooks=None,
config_variables_file_path: Optional[str] = None,
anonymous_usage_statistics=None,
store_backend_defaults: Optional[BaseStoreBackendDefaults] = None,
commented_map: Optional[CommentedMap] = None,
concurrency: Optional[Union[ConcurrencyConfig, Dict]] = None,
):
# Set defaults
if config_version is None:
config_version = DataContextConfigDefaults.DEFAULT_CONFIG_VERSION.value
# Set defaults via store_backend_defaults if one is passed in
# Override attributes from store_backend_defaults with any items passed into the constructor:
if store_backend_defaults is not None:
if stores is None:
stores = store_backend_defaults.stores
if expectations_store_name is None:
expectations_store_name = store_backend_defaults.expectations_store_name
if validations_store_name is None:
validations_store_name = store_backend_defaults.validations_store_name
if evaluation_parameter_store_name is None:
evaluation_parameter_store_name = (
store_backend_defaults.evaluation_parameter_store_name
)
if data_docs_sites is None:
data_docs_sites = store_backend_defaults.data_docs_sites
if checkpoint_store_name is None:
checkpoint_store_name = store_backend_defaults.checkpoint_store_name
self._config_version = config_version
if datasources is None:
datasources = {}
self.datasources = datasources
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
if checkpoint_store_name is not None:
self.checkpoint_store_name = checkpoint_store_name
self.plugins_directory = plugins_directory
if validation_operators is not None:
self.validation_operators = validation_operators
self.stores = stores
self.notebooks = notebooks
self.data_docs_sites = data_docs_sites
self.config_variables_file_path = config_variables_file_path
if anonymous_usage_statistics is None:
anonymous_usage_statistics = AnonymizedUsageStatisticsConfig()
elif isinstance(anonymous_usage_statistics, dict):
anonymous_usage_statistics = AnonymizedUsageStatisticsConfig(
**anonymous_usage_statistics
)
self.anonymous_usage_statistics = anonymous_usage_statistics
if concurrency is None:
concurrency = ConcurrencyConfig()
elif isinstance(concurrency, dict):
concurrency = ConcurrencyConfig(**concurrency)
self.concurrency: ConcurrencyConfig = concurrency
super().__init__(commented_map=commented_map)
# TODO: <Alex>ALEX (we still need the next two properties)</Alex>
@classmethod
def get_config_class(cls):
return cls # DataContextConfig
@classmethod
def get_schema_class(cls):
return DataContextConfigSchema
@property
def config_version(self):
return self._config_version
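# A minimal construction sketch: store_backend_defaults seeds the stores, store
# names, and data_docs_sites for any argument left as None.
#
#     project_config = DataContextConfig(
#         store_backend_defaults=InMemoryStoreBackendDefaults(),
#     )
#     project_config.expectations_store_name  # "expectations_store"
#     project_config.data_docs_sites  # {} for the in-memory defaults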
class CheckpointConfigSchema(Schema):
class Meta:
unknown = INCLUDE
fields = (
"name",
"config_version",
"template_name",
"module_name",
"class_name",
"run_name_template",
"expectation_suite_name",
"batch_request",
"action_list",
"evaluation_parameters",
"runtime_configuration",
"validations",
"profilers",
# Next two fields are for LegacyCheckpoint configuration
"validation_operator_name",
"batches",
# Next fields are used by configurators
"site_names",
"slack_webhook",
"notify_on",
"notify_with",
"ge_cloud_id",
"expectation_suite_ge_cloud_id",
)
ordered = True
# if keys have None value, remove in post_dump
REMOVE_KEYS_IF_NONE = [
"site_names",
"slack_webhook",
"notify_on",
"notify_with",
]
ge_cloud_id = fields.UUID(required=False, allow_none=True)
name = fields.String(required=False, allow_none=True)
config_version = fields.Number(
        validate=lambda x: x is None or 0 < x < 100,
        error_messages={"invalid": "config version must be a number or None."},
required=False,
allow_none=True,
)
template_name = fields.String(required=False, allow_none=True)
module_name = fields.String(required=False, missing="great_expectations.checkpoint")
class_name = fields.Str(required=False, allow_none=True)
run_name_template = fields.String(required=False, allow_none=True)
expectation_suite_name = fields.String(required=False, allow_none=True)
expectation_suite_ge_cloud_id = fields.UUID(required=False, allow_none=True)
batch_request = fields.Dict(required=False, allow_none=True)
action_list = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
evaluation_parameters = fields.Dict(required=False, allow_none=True)
runtime_configuration = fields.Dict(required=False, allow_none=True)
validations = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
profilers = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
# Next two fields are for LegacyCheckpoint configuration
validation_operator_name = fields.Str(required=False, allow_none=True)
batches = fields.List(
cls_or_instance=fields.Dict(
keys=fields.Str(
validate=OneOf(["batch_kwargs", "expectation_suite_names"]),
required=False,
allow_none=True,
)
),
required=False,
allow_none=True,
)
# Next fields are used by configurators
site_names = fields.Raw(required=False, allow_none=True)
slack_webhook = fields.String(required=False, allow_none=True)
notify_on = fields.String(required=False, allow_none=True)
notify_with = fields.String(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
if not (
"name" in data or "validation_operator_name" in data or "batches" in data
):
raise ge_exceptions.InvalidConfigError(
f"""Your current Checkpoint configuration is incomplete. Please update your Checkpoint configuration to
continue.
"""
)
if data.get("config_version"):
if "name" not in data:
raise ge_exceptions.InvalidConfigError(
f"""Your Checkpoint configuration requires the "name" field. Please update your current Checkpoint
configuration to continue.
"""
)
@post_dump
def remove_keys_if_none(self, data, **kwargs):
data = deepcopy(data)
for key in self.REMOVE_KEYS_IF_NONE:
if key in data and data[key] is None:
data.pop(key)
return data
class CheckpointConfig(BaseYamlConfig):
# TODO: <Alex>ALEX (does not work yet)</Alex>
# _config_schema_class = CheckpointConfigSchema
def __init__(
self,
name: Optional[str] = None,
config_version: Optional[Union[int, float]] = None,
template_name: Optional[str] = None,
module_name: Optional[str] = None,
class_name: Optional[str] = None,
run_name_template: Optional[str] = None,
expectation_suite_name: Optional[str] = None,
batch_request: Optional[dict] = None,
action_list: Optional[List[dict]] = None,
evaluation_parameters: Optional[dict] = None,
runtime_configuration: Optional[dict] = None,
validations: Optional[List[dict]] = None,
profilers: Optional[List[dict]] = None,
validation_operator_name: Optional[str] = None,
batches: Optional[List[dict]] = None,
commented_map: Optional[CommentedMap] = None,
ge_cloud_id: Optional[str] = None,
# the following four args are used by SimpleCheckpoint
site_names: Optional[Union[list, str]] = None,
slack_webhook: Optional[str] = None,
notify_on: Optional[str] = None,
notify_with: Optional[str] = None,
expectation_suite_ge_cloud_id: Optional[str] = None,
):
self._name = name
self._config_version = config_version
if self.config_version is None:
class_name = class_name or "LegacyCheckpoint"
self.validation_operator_name = validation_operator_name
if batches is not None and isinstance(batches, list):
self.batches = batches
else:
class_name = class_name or "Checkpoint"
self._template_name = template_name
self._run_name_template = run_name_template
self._expectation_suite_name = expectation_suite_name
self._expectation_suite_ge_cloud_id = expectation_suite_ge_cloud_id
self._batch_request = batch_request
self._action_list = action_list or []
self._evaluation_parameters = evaluation_parameters or {}
self._runtime_configuration = runtime_configuration or {}
self._validations = validations or []
self._profilers = profilers or []
self._ge_cloud_id = ge_cloud_id
# the following attributes are used by SimpleCheckpoint
self._site_names = site_names
self._slack_webhook = slack_webhook
self._notify_on = notify_on
self._notify_with = notify_with
self._module_name = module_name or "great_expectations.checkpoint"
self._class_name = class_name
super().__init__(commented_map=commented_map)
def update(
self,
other_config: Optional["CheckpointConfig"] = None,
runtime_kwargs: Optional[dict] = None,
):
assert other_config is not None or runtime_kwargs is not None, (
"other_config and runtime_kwargs cannot both " "be None"
)
if other_config is not None:
# replace
if other_config.name is not None:
self.name = other_config.name
if other_config.module_name is not None:
self.module_name = other_config.module_name
if other_config.class_name is not None:
self.class_name = other_config.class_name
if other_config.run_name_template is not None:
self.run_name_template = other_config.run_name_template
if other_config.expectation_suite_name is not None:
self.expectation_suite_name = other_config.expectation_suite_name
if other_config.expectation_suite_ge_cloud_id is not None:
self.expectation_suite_ge_cloud_id = (
other_config.expectation_suite_ge_cloud_id
)
# update
if other_config.batch_request is not None:
if self.batch_request is None:
batch_request = {}
else:
batch_request = self.batch_request
other_batch_request = other_config.batch_request
updated_batch_request = nested_update(
batch_request,
other_batch_request,
)
self._batch_request = updated_batch_request
if other_config.action_list is not None:
self.action_list = self.get_updated_action_list(
base_action_list=self.action_list,
other_action_list=other_config.action_list,
)
if other_config.evaluation_parameters is not None:
nested_update(
self.evaluation_parameters,
other_config.evaluation_parameters,
)
if other_config.runtime_configuration is not None:
nested_update(
self.runtime_configuration,
other_config.runtime_configuration,
)
if other_config.validations is not None:
self.validations.extend(
filter(
lambda v: v not in self.validations, other_config.validations
)
)
if other_config.profilers is not None:
self.profilers.extend(other_config.profilers)
if runtime_kwargs is not None and any(runtime_kwargs.values()):
# replace
if runtime_kwargs.get("run_name_template") is not None:
self.run_name_template = runtime_kwargs.get("run_name_template")
if runtime_kwargs.get("expectation_suite_name") is not None:
self.expectation_suite_name = runtime_kwargs.get(
"expectation_suite_name"
)
if runtime_kwargs.get("expectation_suite_ge_cloud_id") is not None:
self.expectation_suite_ge_cloud_id = runtime_kwargs.get(
"expectation_suite_ge_cloud_id"
)
# update
if runtime_kwargs.get("batch_request") is not None:
batch_request = self.batch_request
batch_request = batch_request or {}
runtime_batch_request = runtime_kwargs.get("batch_request")
batch_request = nested_update(batch_request, runtime_batch_request)
self._batch_request = batch_request
if runtime_kwargs.get("action_list") is not None:
self.action_list = self.get_updated_action_list(
base_action_list=self.action_list,
other_action_list=runtime_kwargs.get("action_list"),
)
if runtime_kwargs.get("evaluation_parameters") is not None:
nested_update(
self.evaluation_parameters,
runtime_kwargs.get("evaluation_parameters"),
)
if runtime_kwargs.get("runtime_configuration") is not None:
nested_update(
self.runtime_configuration,
runtime_kwargs.get("runtime_configuration"),
)
if runtime_kwargs.get("validations") is not None:
self.validations.extend(
filter(
lambda v: v not in self.validations,
runtime_kwargs.get("validations"),
)
)
if runtime_kwargs.get("profilers") is not None:
self.profilers.extend(runtime_kwargs.get("profilers"))
# TODO: <Alex>ALEX (we still need the next two properties)</Alex>
@classmethod
def get_config_class(cls):
return cls # CheckpointConfig
@classmethod
def get_schema_class(cls):
return CheckpointConfigSchema
@property
def ge_cloud_id(self):
return self._ge_cloud_id
@ge_cloud_id.setter
def ge_cloud_id(self, value: str):
self._ge_cloud_id = value
@property
def expectation_suite_ge_cloud_id(self):
return self._expectation_suite_ge_cloud_id
@expectation_suite_ge_cloud_id.setter
def expectation_suite_ge_cloud_id(self, value: str):
self._expectation_suite_ge_cloud_id = value
@property
def name(self):
return self._name
@name.setter
def name(self, value: str):
self._name = value
@property
def template_name(self):
return self._template_name
@template_name.setter
def template_name(self, value: str):
self._template_name = value
@property
def config_version(self):
return self._config_version
@property
def validations(self):
return self._validations
@property
def profilers(self):
return self._profilers
@property
def module_name(self):
return self._module_name
@module_name.setter
def module_name(self, value: str):
self._module_name = value
@property
def class_name(self):
return self._class_name
@class_name.setter
def class_name(self, value: str):
self._class_name = value
@property
def run_name_template(self):
return self._run_name_template
@run_name_template.setter
def run_name_template(self, value: str):
self._run_name_template = value
@property
def batch_request(self):
return self._batch_request
@batch_request.setter
def batch_request(self, value: dict):
self._batch_request = value
@property
def expectation_suite_name(self):
return self._expectation_suite_name
@expectation_suite_name.setter
def expectation_suite_name(self, value: str):
self._expectation_suite_name = value
@property
def action_list(self):
return self._action_list
@action_list.setter
def action_list(self, value: List[dict]):
self._action_list = value
@property
def site_names(self):
return self._site_names
@property
def slack_webhook(self):
return self._slack_webhook
@property
def notify_on(self):
return self._notify_on
@property
def notify_with(self):
return self._notify_with
@classmethod
def get_updated_action_list(
cls,
base_action_list: list,
other_action_list: list,
) -> List[dict]:
base_action_list_dict = {action["name"]: action for action in base_action_list}
for other_action in other_action_list:
other_action_name = other_action["name"]
if other_action_name in base_action_list_dict:
if other_action["action"] is None:
base_action_list_dict.pop(other_action_name)
else:
nested_update(
base_action_list_dict[other_action_name],
other_action,
dedup=True,
)
else:
base_action_list_dict[other_action_name] = other_action
return list(base_action_list_dict.values())
@property
def evaluation_parameters(self):
return self._evaluation_parameters
@property
def runtime_configuration(self):
return self._runtime_configuration
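# A minimal sketch of get_updated_action_list semantics: actions merge by name,
# a None "action" removes the entry, and unseen names are appended.
#
#     base = [{"name": "store_validation_result",
#              "action": {"class_name": "StoreValidationResultAction"}}]
#     other = [{"name": "store_validation_result", "action": None},
#              {"name": "update_data_docs",
#               "action": {"class_name": "UpdateDataDocsAction"}}]
#     CheckpointConfig.get_updated_action_list(base, other)
#     # -> [{"name": "update_data_docs", "action": {"class_name": "UpdateDataDocsAction"}}]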
class CheckpointValidationConfig(DictDot):
pass
class CheckpointValidationConfigSchema(Schema):
pass
dataContextConfigSchema = DataContextConfigSchema()
datasourceConfigSchema = DatasourceConfigSchema()
dataConnectorConfigSchema = DataConnectorConfigSchema()
assetConfigSchema = AssetConfigSchema()
sorterConfigSchema = SorterConfigSchema()
anonymizedUsageStatisticsSchema = AnonymizedUsageStatisticsConfigSchema()
notebookConfigSchema = NotebookConfigSchema()
checkpointConfigSchema = CheckpointConfigSchema()
concurrencyConfigSchema = ConcurrencyConfigSchema()
libcore_result_rs_0024.rs | fn main() {
    let x: Result<u32, &str> = Ok(2);
    let y: Result<u32, &str> = Err("late error");
    assert_eq!(x.or(y), Ok(2));
let x: Result<u32, &str> = Err("early error");
let y: Result<u32, &str> = Ok(2);
assert_eq!(x.or(y), Ok(2));
let x: Result<u32, &str> = Err("not a 2");
let y: Result<u32, &str> = Err("late error");
assert_eq!(x.or(y), Err("late error"));
let x: Result<u32, &str> = Ok(2);
let y: Result<u32, &str> = Ok(100);
assert_eq!(x.or(y), Ok(2));
}
server_test.go
package server
import (
"os"
"path/filepath"
"reflect"
"testing"
"encoding/json"
"github.com/pkg/errors"
"github.com/thales-e-security/contribstats/pkg/cache"
"github.com/thales-e-security/contribstats/pkg/collector"
"github.com/thales-e-security/contribstats/pkg/config"
"net/http"
"net/http/httptest"
"time"
)
var constants config.Config
func setupTestCase(t *testing.T) func(t *testing.T) {
constants = config.Config{
Organizations: []string{"unorepo"},
Domains: []string{"thalesesec.net", "thales-e-security.com"},
Cache: filepath.Join(os.TempDir(), "contribstatstest"),
Interval: 10,
Token: os.Getenv("CONTRIBSTATS_TOKEN"),
}
return func(t *testing.T) {
t.Log("teardown test case")
}
}
func setupSubTest(t *testing.T, origins bool) func(t *testing.T) {
if origins {
constants.Origins = []string{"thalesecurity.com"}
}
return func(t *testing.T) {
t.Log("teardown sub test")
}
}
func TestNewStatServer(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
tests := []struct {
name string
wantSs Server
}{
{
name: "OK",
wantSs: &StatServer{
constants: constants,
collector: collector.NewGitHubCloneCollector(constants, cache.NewGitCache(constants.Cache)),
},
}, {
name: "OK - No Cache",
wantSs: &StatServer{
constants: constants,
collector: collector.NewGitHubCloneCollector(constants, cache.NewGitCache(constants.Cache)),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if gotSs := NewStatServer(constants); !reflect.DeepEqual(gotSs, tt.wantSs) {
t.Errorf("NewStatServer() = %v, want %v", gotSs, tt.wantSs)
}
})
}
}
func TestStatServer_Start(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
tests := []struct {
name string
ss Server
cancel bool
quit bool
error bool
wantErr bool
}{
{
name: "OK",
ss: NewStatServer(constants),
wantErr: false,
},
{
name: "Error",
ss: NewStatServer(constants),
wantErr: true,
},
{
name: "Cancel",
ss: NewStatServer(constants),
cancel: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var gotExitCode int
// store old os.Exit
oldOSExit := osExit
// override os.Exit
osExit = func(code int) {
gotExitCode = code
}
if tt.wantErr {
                // TODO: find a way to force an error on the listener
}
// Start the server
go tt.ss.Start()
// wait for it...
time.Sleep(10 * time.Millisecond)
// Canceling
if tt.cancel {
// Kill it ...
cancel <- true
}
// repair os.Exit
osExit = oldOSExit
            // See what exit code we got
if gotExitCode != 0 {
t.Error("Got unhealthy exit")
}
})
}
}
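// The osExit/cancel indirection exercised above is a common seam for testing
// code that would otherwise call os.Exit directly. A sketch of the
// production-side declarations these tests assume (defined in the package
// under test, not shown in this file):
//
//	var osExit = os.Exit         // tests swap this for a recording func
//	var cancel = make(chan bool) // lets tests stop a started server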
func TestStatServer_startServer(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
type args struct {
errs chan error
}
ss := NewStatServer(constants)
tests := []struct {
name string
ss *StatServer
args args
origins bool
}{
{
name: "OK",
ss: ss.(*StatServer),
args: args{
errs: make(chan error),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
teardown := setupSubTest(t, tt.origins)
defer teardown(t)
// Only run the server for a few seconds
go func() {
time.Sleep(2 * time.Second)
serverCancel <- true
}()
tt.ss.startServer(tt.args.errs)
})
}
}
func TestStatServer_startCollector(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
type args struct {
errs chan error
}
tests := []struct {
name string
ss *StatServer
args args
wantErr bool
}{
{
name: "OK",
ss: &StatServer{
stats: nil,
collector: collector.NewGitHubCloneCollector(constants, cache.NewGitCache(cache.DefaultCache)),
},
args: args{
errs: errs,
},
wantErr: false,
},
{
name: "Error",
ss: &StatServer{
collector: &MockCollector{
wantErr: true,
},
},
args: args{
errs: errs,
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := make(chan time.Time)
timeNewTicker = func(d time.Duration) *time.Ticker {
return &time.Ticker{
C: c,
}
}
go tt.ss.startCollector(tt.args.errs)
go func() {
time.Sleep(10 * time.Millisecond)
c <- time.Now()
}()
select {
case <-c:
}
})
}
}
func TestStatServer_cleanup(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
tests := []struct {
name string
ss *StatServer
}{
{
name: "OK",
ss: &StatServer{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
osExit = func(code int) {
}
tt.ss.cleanup()
})
}
}
func TestStatServer_statsHandler(t *testing.T) {
teardown := setupTestCase(t)
defer teardown(t)
type args struct {
w http.ResponseWriter
r *http.Request
}
w := httptest.NewRecorder()
tests := []struct {
name string
ss *StatServer
args args
expect string
}{
{
name: "OK",
ss: &StatServer{
stats: &collector.CollectReport{
Repos: nil,
Commits: 0,
Lines: 0,
Projects: 0,
},
},
expect: `{"commits":0,"lines":0,"projects":0}`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
handler := http.HandlerFunc(tt.ss.statsHandler)
req, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatal(err)
}
handler.ServeHTTP(w, req)
if status := w.Code; status != http.StatusOK {
t.Errorf("handler returned wrong status code: got %v want %v",
status, http.StatusOK)
}
// Check the response body is what we expect.
if eq := AreEqualJSON(w.Body.String(), tt.expect); !eq {
t.Errorf("handler returned unexpected body: got %v want %v",
w.Body.String(), tt.expect)
}
})
}
}
func AreEqualJSON(s1, s2 string) bool {
var o1 interface{}
var o2 interface{}
var err error
err = json.Unmarshal([]byte(s1), &o1)
if err != nil {
return false
}
err = json.Unmarshal([]byte(s2), &o2)
if err != nil {
return false
}
return reflect.DeepEqual(o1, o2)
}
type MockCollector struct {
wantErr bool
}
func (mc *MockCollector) Collect() (stats *collector.CollectReport, err error) {
stats = &collector.CollectReport{}
if mc.wantErr {
err = errors.New("expected error")
}
return
}
proto_utils.py
# Copyright 2017 Google Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Utility library for working with protobufs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from google.protobuf.internal import api_implementation
def uses_fast_cpp_protos_or_die():
if api_implementation.Type() != 'cpp':
    raise ValueError('Expected to be using C++ protobuf implementation '
                     '(api_implementation.Type() == "cpp") but it is {}'.format(
                         api_implementation.Type()))
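# Typical usage (a sketch, not part of the original file): call once at import
# time so code that depends on the fast C++ proto implementation fails loudly
# rather than silently running on the slow pure-Python fallback.
#
#     proto_utils.uses_fast_cpp_protos_or_die()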
convert_from_arm_function_builder.go
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
*/
package armconversion
import (
"fmt"
"go/token"
"strings"
"github.com/dave/dst"
"github.com/Azure/azure-service-operator/v2/tools/generator/internal/astbuilder"
"github.com/Azure/azure-service-operator/v2/tools/generator/internal/astmodel"
)
type convertFromARMBuilder struct {
conversionBuilder
typedInputIdent string
inputIdent string
typeConversionBuilder *astmodel.ConversionFunctionBuilder
locals *astmodel.KnownLocalsSet
}
func newConvertFromARMBuilder(
c *ARMConversionFunction,
codeGenerationContext *astmodel.CodeGenerationContext,
receiver astmodel.TypeName,
methodName string) *convertFromARMBuilder {
result := &convertFromARMBuilder{
// Note: If we have a property with these names we will have a compilation issue in the generated
// code. Right now that doesn't seem to be the case anywhere but if it does happen we may need
// to harden this logic some to choose an unused name.
typedInputIdent: "typedInput",
inputIdent: "armInput",
conversionBuilder: conversionBuilder{
methodName: methodName,
armType: c.armType,
kubeType: getReceiverObjectType(codeGenerationContext, receiver),
receiverIdent: c.idFactory.CreateReceiver(receiver.Name()),
receiverTypeExpr: receiver.AsType(codeGenerationContext),
armTypeIdent: c.armTypeName.Name(),
idFactory: c.idFactory,
typeKind: c.typeKind,
codeGenerationContext: codeGenerationContext,
},
typeConversionBuilder: astmodel.NewConversionFunctionBuilder(c.idFactory, codeGenerationContext),
locals: astmodel.NewKnownLocalsSet(c.idFactory),
}
// Add the receiver ident into the known locals
result.locals.Add(result.receiverIdent)
// It's a bit awkward that there are two levels of "handler" here, but they serve different purposes:
// The top level propertyConversionHandlers is about determining which properties are involved: given a property on the destination type it
// determines which property (if any) on the source type will be converted to the destination.
// The "inner" handler (typeConversionBuilder) is about determining how to convert between two types: given a
// source type and a destination type, figure out how to make the assignment work. It has no knowledge of broader object structure
// or other properties.
result.typeConversionBuilder.AddConversionHandlers(result.convertComplexTypeNameProperty)
result.propertyConversionHandlers = []propertyConversionHandler{
// Handlers for specific properties come first
result.namePropertyHandler,
result.ownerPropertyHandler,
result.conditionsPropertyHandler,
// Generic handlers come second
result.referencePropertyHandler,
result.secretPropertyHandler,
result.flattenedPropertyHandler,
result.propertiesWithSameNameHandler,
}
return result
}
func (builder *convertFromARMBuilder) functionDeclaration() *dst.FuncDecl {
fn := &astbuilder.FuncDetails{
Name: builder.methodName,
ReceiverIdent: builder.receiverIdent,
ReceiverType: astbuilder.Dereference(builder.receiverTypeExpr),
Body: builder.functionBodyStatements(),
}
fn.AddComments("populates a Kubernetes CRD object from an Azure ARM object")
fn.AddParameter(
builder.idFactory.CreateIdentifier(astmodel.OwnerProperty, astmodel.NotExported),
astmodel.ArbitraryOwnerReference.AsType(builder.codeGenerationContext))
fn.AddParameter(builder.inputIdent, dst.NewIdent("interface{}"))
fn.AddReturns("error")
return fn.DefineFunc()
}
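// For orientation, the declaration assembled above has this shape (the
// receiver and method names here are hypothetical; the real ones come from the
// conversion function's configuration):
//
//	func (widget *Widget) PopulateFromARM(owner genruntime.ArbitraryOwnerReference, armInput interface{}) error {
//		...
//	}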
func (builder *convertFromARMBuilder) functionBodyStatements() []dst.Stmt {
var result []dst.Stmt
conversionStmts := generateTypeConversionAssignments(
builder.armType,
builder.kubeType,
builder.propertyConversionHandler)
// We remove empty statements here as they may have been used to store comments or other
// notes about properties which were not transformed. We want to keep these statements in the
// set of statements, but they don't count as a conversion for the purposes of actually
// using the typedInput variable
hasConversions := len(removeEmptyStatements(conversionStmts)) > 0
assertStmts := builder.assertInputTypeIsARM(hasConversions)
// perform a type assert and check its results
result = append(result, assertStmts...)
result = append(result, conversionStmts...)
result = append(result, astbuilder.ReturnNoError())
return result
}
func (builder *convertFromARMBuilder) assertInputTypeIsARM(needsResult bool) []dst.Stmt {
fmtPackage := builder.codeGenerationContext.MustGetImportedPackageName(astmodel.FmtReference)
dest := builder.typedInputIdent
if !needsResult {
dest = "_" // drop result
}
// perform a type assert
// <dest>, ok := <inputIdent>.(<inputIdent>)
typeAssert := astbuilder.TypeAssert(
dst.NewIdent(dest),
dst.NewIdent(builder.inputIdent),
dst.NewIdent(builder.armTypeIdent))
// Check the result of the type assert
// if !ok {
// return fmt.Errorf("unexpected type supplied ...", <inputIdent>)
// }
returnIfNotOk := astbuilder.ReturnIfNotOk(
astbuilder.FormatError(
fmtPackage,
fmt.Sprintf("unexpected type supplied for %s() function. Expected %s, got %%T",
builder.methodName,
builder.armTypeIdent),
dst.NewIdent(builder.inputIdent)))
return astbuilder.Statements(typeAssert, returnIfNotOk)
}
//////////////////////
// Conversion handlers
//////////////////////
func (builder *convertFromARMBuilder) namePropertyHandler(
toProp *astmodel.PropertyDefinition,
fromType *astmodel.ObjectType) ([]dst.Stmt, bool) {
if builder.typeKind != TypeKindSpec || !toProp.HasName(astmodel.AzureNameProperty) {
return nil, false
}
// Check to make sure that the ARM object has a "Name" property (which matches our "AzureName")
fromProp, ok := fromType.Property("Name")
if !ok {
panic("ARM resource missing property 'Name'")
}
// Invoke SetAzureName(ExtractKubernetesResourceNameFromARMName(this.Name)):
return []dst.Stmt{
&dst.ExprStmt{
X: astbuilder.CallQualifiedFunc(
builder.receiverIdent,
"SetAzureName",
astbuilder.CallQualifiedFunc(
astmodel.GenRuntimeReference.PackageName(),
"ExtractKubernetesResourceNameFromARMName",
astbuilder.Selector(dst.NewIdent(builder.typedInputIdent), string(fromProp.PropertyName()))),
),
},
}, true
}
func (builder *convertFromARMBuilder) referencePropertyHandler(
toProp *astmodel.PropertyDefinition,
_ *astmodel.ObjectType) ([]dst.Stmt, bool) {
isResourceReference := astmodel.TypeEquals(toProp.PropertyType(), astmodel.ResourceReferenceType)
isOptionalResourceReference := astmodel.TypeEquals(toProp.PropertyType(), astmodel.NewOptionalType(astmodel.ResourceReferenceType))
if !isResourceReference && !isOptionalResourceReference {
return nil, false
}
// TODO: For now, we are NOT assigning to these. _Status types don't have them and it's unclear what
// TODO: the fromARM functions do for us on Spec types. We may need them for diffing though. If so we will
// TODO: need to revisit this and actually assign something
return nil, true
}
func (builder *convertFromARMBuilder) secretPropertyHandler(
toProp *astmodel.PropertyDefinition,
_ *astmodel.ObjectType) ([]dst.Stmt, bool) {
isSecretReference := astmodel.TypeEquals(toProp.PropertyType(), astmodel.SecretReferenceType)
isOptionalSecretReference := astmodel.TypeEquals(toProp.PropertyType(), astmodel.NewOptionalType(astmodel.SecretReferenceType))
if !isSecretReference && !isOptionalSecretReference {
return nil, false
}
// TODO: For now, we are NOT assigning to these. _Status types don't have them and it's unclear what
// TODO: the fromARM functions do for us on Spec types. We may need them for diffing though. If so we will
// TODO: need to revisit this and actually assign something
return nil, true
}
func (builder *convertFromARMBuilder) ownerPropertyHandler(
toProp *astmodel.PropertyDefinition,
_ *astmodel.ObjectType) ([]dst.Stmt, bool) {
ownerParameter := builder.idFactory.CreateIdentifier(astmodel.OwnerProperty, astmodel.NotExported)
ownerProp := builder.idFactory.CreatePropertyName(astmodel.OwnerProperty, astmodel.Exported)
if toProp.PropertyName() != ownerProp || builder.typeKind != TypeKindSpec {
return nil, false
}
// Confirm that the destination type is the type we expect
ownerNameType, ok := astmodel.AsTypeName(toProp.PropertyType())
if !ok {
var kubeDescription strings.Builder
builder.kubeType.WriteDebugDescription(&kubeDescription, nil)
var armDescription strings.Builder
builder.armType.WriteDebugDescription(&armDescription, nil)
panic(fmt.Sprintf("Owner property was not of type TypeName. Kube: %s, ARM: %s", kubeDescription.String(), armDescription.String()))
}
var convertedOwner dst.Expr
if ownerNameType == astmodel.KnownResourceReferenceType {
compositeLit := astbuilder.NewCompositeLiteralBuilder(astmodel.KnownResourceReferenceType.AsType(builder.codeGenerationContext))
compositeLit.AddField("Name", astbuilder.Selector(dst.NewIdent(ownerParameter), "Name"))
convertedOwner = compositeLit.Build()
} else if ownerNameType == astmodel.ArbitraryOwnerReference {
convertedOwner = dst.NewIdent(ownerParameter)
} else {
panic(fmt.Sprintf("found Owner property on spec with unexpected TypeName %s", ownerNameType.String()))
}
result := astbuilder.QualifiedAssignment(
dst.NewIdent(builder.receiverIdent),
string(toProp.PropertyName()),
token.ASSIGN,
convertedOwner)
return []dst.Stmt{result}, true
}
// conditionsPropertyHandler generates conversions for the "Conditions" status property. This property is set by the controller
// after each reconcile and so does not need to be preserved.
func (builder *convertFromARMBuilder) conditionsPropertyHandler(
toProp *astmodel.PropertyDefinition,
_ *astmodel.ObjectType) ([]dst.Stmt, bool) {
isPropConditions := toProp.PropertyName() == builder.idFactory.CreatePropertyName(astmodel.ConditionsProperty, astmodel.Exported)
if !isPropConditions || builder.typeKind != TypeKindStatus {
return nil, false
}
return nil, true
}
// flattenedPropertyHandler generates conversions for properties that
// were flattened out from inside other properties. The code it generates will
// look something like:
//
// If 'X' is a property that was flattened:
//
// k8sObj.Y1 = armObj.X.Y1;
// k8sObj.Y2 = armObj.X.Y2;
//
// in reality each assignment is likely to be another conversion that is specific
// to the type being converted.
func (builder *convertFromARMBuilder) flattenedPropertyHandler(
toProp *astmodel.PropertyDefinition,
fromType *astmodel.ObjectType) ([]dst.Stmt, bool) {
if !toProp.WasFlattened() {
return nil, false
}
for _, fromProp := range fromType.Properties() {
if toProp.WasFlattenedFrom(fromProp.PropertyName()) {
return builder.buildFlattenedAssignment(toProp, fromProp), true
}
}
panic(fmt.Sprintf("couldn’t find source ARM property ‘%s’ that k8s property ‘%s’ was flattened from", toProp.FlattenedFrom()[0], toProp.PropertyName()))
}
func (builder *convertFromARMBuilder) buildFlattenedAssignment(toProp *astmodel.PropertyDefinition, fromProp *astmodel.PropertyDefinition) []dst.Stmt {
if len(toProp.FlattenedFrom()) > 2 {
// this doesn't appear to happen anywhere in the JSON schemas currently
var props []string
for _, ff := range toProp.FlattenedFrom() {
props = append(props, string(ff))
}
panic(fmt.Sprintf("need to implement multiple levels of flattening: property ‘%s’ on %s was flattened from ‘%s’",
toProp.PropertyName(),
builder.receiverIdent,
strings.Join(props, ".")))
}
allTypes := builder.codeGenerationContext.GetAllReachableTypes()
// the from shape here must be:
// 1. maybe a typename, pointing to…
// 2. maybe optional, wrapping …
// 3. maybe a typename, pointing to…
// 4. an object type
// (1.) resolve any outer typename
fromPropType, err := allTypes.FullyResolve(fromProp.PropertyType())
if err != nil {
panic(err)
}
var fromPropObjType *astmodel.ObjectType
var objOk bool
// (2.) resolve any optional type
generateNilCheck := false
if fromPropOptType, ok := fromPropType.(*astmodel.OptionalType); ok {
generateNilCheck = true
// (3.) resolve any inner typename
elementType, err := allTypes.FullyResolve(fromPropOptType.Element())
if err != nil {
panic(err)
}
// (4.) resolve the inner object type
fromPropObjType, objOk = elementType.(*astmodel.ObjectType)
} else {
// (4.) resolve the inner object type
fromPropObjType, objOk = fromPropType.(*astmodel.ObjectType)
}
if !objOk {
// see pipeline_flatten_properties.go:flattenPropType which will only flatten from (optional) object types
panic(fmt.Sprintf("property ‘%s’ marked as flattened from non-object type %T, which shouldn’t be possible",
toProp.PropertyName(),
fromPropType))
}
// *** Now generate the code! ***
toPropFlattenedFrom := toProp.FlattenedFrom()
originalPropName := toPropFlattenedFrom[len(toPropFlattenedFrom)-1]
nestedProp, ok := fromPropObjType.Property(originalPropName)
if !ok {
panic("couldn't find source of flattened property")
}
// need to make a clone of builder.locals if we are going to nest in an if statement
locals := builder.locals
if generateNilCheck {
locals = locals.Clone()
}
stmts := builder.typeConversionBuilder.BuildConversion(
astmodel.ConversionParameters{
Source: astbuilder.Selector(dst.NewIdent(builder.typedInputIdent), string(fromProp.PropertyName()), string(originalPropName)),
SourceType: nestedProp.PropertyType(),
Destination: astbuilder.Selector(dst.NewIdent(builder.receiverIdent), string(toProp.PropertyName())),
DestinationType: toProp.PropertyType(),
NameHint: string(toProp.PropertyName()),
ConversionContext: nil,
AssignmentHandler: nil,
Locals: locals,
})
// we were unable to generate an inner conversion, so we cannot generate the overall conversion
if len(stmts) == 0 {
return nil
}
if generateNilCheck {
propToCheck := astbuilder.Selector(dst.NewIdent(builder.typedInputIdent), string(fromProp.PropertyName()))
stmts = astbuilder.Statements(
astbuilder.IfNotNil(propToCheck, stmts...))
}
result := []dst.Stmt{
&dst.EmptyStmt{
Decs: dst.EmptyStmtDecorations{
NodeDecs: dst.NodeDecs{
End: []string{"// copying flattened property:"},
},
},
},
}
return append(result, stmts...)
}
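// Illustrative sketch (assumed property names) of the flattened-property copy
// built above when the source property is optional, so a nil check wraps it:
//
// // copying flattened property:
// if typedInput.Properties != nil {
// 	rule.Y1 = typedInput.Properties.Y1
// }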
func (builder *convertFromARMBuilder) propertiesWithSameNameHandler(
toProp *astmodel.PropertyDefinition,
fromType *astmodel.ObjectType) ([]dst.Stmt, bool) {
fromProp, ok := fromType.Property(toProp.PropertyName())
if !ok {
return nil, false
}
return builder.typeConversionBuilder.BuildConversion(
astmodel.ConversionParameters{
Source: astbuilder.Selector(dst.NewIdent(builder.typedInputIdent), string(fromProp.PropertyName())),
SourceType: fromProp.PropertyType(),
Destination: astbuilder.Selector(dst.NewIdent(builder.receiverIdent), string(toProp.PropertyName())),
DestinationType: toProp.PropertyType(),
NameHint: string(toProp.PropertyName()),
ConversionContext: nil,
AssignmentHandler: nil,
Locals: builder.locals,
}), true
}
//////////////////////////////////////////////////////////////////////////////////
// Complex property conversion (for when properties aren't simple primitive types)
//////////////////////////////////////////////////////////////////////////////////
// convertComplexTypeNameProperty handles conversion of complex TypeName properties.
// This function generates code that looks like this:
// <nameHint> := <destinationType>{}
// err = <nameHint>.FromARM(owner, <source>)
// if err != nil {
// return err
// }
// <destination> = <nameHint>
func (builder *convertFromARMBuilder) convertComplexTypeNameProperty(_ *astmodel.ConversionFunctionBuilder, params astmodel.ConversionParameters) []dst.Stmt {
destinationType, ok := params.DestinationType.(astmodel.TypeName)
if !ok {
return nil
}
sourceType, ok := params.SourceType.(astmodel.TypeName)
if !ok {
return nil
}
// This handler only deals with TypeNames that differ; when they are equal there is nothing to convert here
if astmodel.TypeEquals(sourceType, destinationType) {
return nil
}
propertyLocalVar := builder.typeConversionBuilder.CreateLocal(params.Locals, "", params.NameHint)
ownerName := builder.idFactory.CreateIdentifier(astmodel.OwnerProperty, astmodel.NotExported)
newVariable := astbuilder.NewVariable(propertyLocalVar, destinationType.Name())
if !destinationType.PackageReference.Equals(builder.codeGenerationContext.CurrentPackage()) {
// struct name has to be qualified
packageName, err := builder.codeGenerationContext.GetImportedPackageName(destinationType.PackageReference)
if err != nil {
panic(err)
}
newVariable = astbuilder.NewVariableQualified(
propertyLocalVar,
packageName,
destinationType.Name())
}
tok := token.ASSIGN
if !params.Locals.HasName("err") {
tok = token.DEFINE
params.Locals.Add("err")
}
var results []dst.Stmt
results = append(results, newVariable)
results = append(
results,
astbuilder.AssignmentStatement(
dst.NewIdent("err"),
tok,
astbuilder.CallQualifiedFunc(
propertyLocalVar, builder.methodName, dst.NewIdent(ownerName), params.GetSource())))
results = append(results, astbuilder.CheckErrorAndReturn())
if params.AssignmentHandler == nil {
results = append(
results,
astbuilder.SimpleAssignment(
params.GetDestination(),
dst.NewIdent(propertyLocalVar)))
} else {
results = append(
results,
params.AssignmentHandler(params.GetDestination(), dst.NewIdent(propertyLocalVar)))
}
return results
}
| newConvertFromARMFunctionBuilder |
69-sqrt.go | package main
import "fmt"
func main_13() {
x := 4
y := mySqrt(x)
fmt.Printf("x:%d y:%d", x, y)
x = 5
y = mySqrt(x)
fmt.Printf("x:%d y:%d", x, y)
x = 100
y = mySqrt(x)
fmt.Printf("x:%d y:%d", x, y)
}
func mySqrtFail(x int) int |
func mySqrt(x int) int {
if x == 0 {
return 0
}
// Newton's method: start from x/2 and refine the estimate.
approx := 0.5 * float64(x)
for i := 0; i < 20; i++ {
// next = (approx + x/approx) / 2; approx is already float64, so no conversion is needed
betterapprox := 0.5 * (approx + float64(x)/approx)
approx = betterapprox
}
return int(approx)
}
| {
if x <= 0 {
return 0
}
precision := 1
ret := x / 2
for {
if ret*ret > x {
ret = ret / 2
} else {
if (x - ret*ret) <= precision {
break
}
ret++
}
}
return ret
} |
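// Illustrative expectations for mySqrt above (Newton's method, truncated to int):
//
// mySqrt(4) == 2
// mySqrt(5) == 2 // float result ≈ 2.236, truncated by int()
// mySqrt(100) == 10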
rule_31.py | def findDecision(obj): #obj[0]: Passanger, obj[1]: Time, obj[2]: Coupon, obj[3]: Education, obj[4]: Occupation, obj[5]: Bar, obj[6]: Restaurant20to50, obj[7]: Direction_same, obj[8]: Distance
# {"feature": "Direction_same", "instances": 23, "metric_value": 0.9986, "depth": 1}
| if obj[7]<=0:
# {"feature": "Coupon", "instances": 18, "metric_value": 0.9641, "depth": 2}
if obj[2]>0:
# {"feature": "Occupation", "instances": 15, "metric_value": 0.8366, "depth": 3}
if obj[4]>1:
# {"feature": "Restaurant20to50", "instances": 13, "metric_value": 0.6194, "depth": 4}
if obj[6]>0.0:
# {"feature": "Time", "instances": 12, "metric_value": 0.4138, "depth": 5}
if obj[1]>0:
return 'True'
elif obj[1]<=0:
# {"feature": "Education", "instances": 2, "metric_value": 1.0, "depth": 6}
if obj[3]>2:
return 'False'
elif obj[3]<=2:
return 'True'
else: return 'True'
else: return 'False'
elif obj[6]<=0.0:
return 'False'
else: return 'False'
elif obj[4]<=1:
return 'False'
else: return 'False'
elif obj[2]<=0:
return 'False'
else: return 'False'
elif obj[7]>0:
return 'False'
else: return 'False' |
|
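# Illustrative call of findDecision above; the feature order follows the header
# comment, and the values here are made-up examples:
#
# findDecision([1, 1, 1, 2, 3, 1.0, 1.0, 0, 2]) # -> 'True' for this input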
errors.ts | import * as Errors from 'http-errors';
/**
* Denali uses the **http-errors** package for handling HTTP errors. Check
 * [its documentation](https://github.com/jshttp/http-errors) for how to use
* it.
*
* @package runtime | * @since 0.1.0
*/
export default Errors; |
|
appprotect_common.go | package validation
import (
"fmt"
"net"
"regexp"
"strconv"
"strings"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)
// ValidateRequiredSlices validates the required slices.
func ValidateRequiredSlices(obj *unstructured.Unstructured, fieldsList [][]string) error {
for _, fields := range fieldsList {
field, found, err := unstructured.NestedSlice(obj.Object, fields...)
if err != nil {
return fmt.Errorf("Error checking for required field %v: %w", field, err)
}
if !found {
return fmt.Errorf("Required field %v not found", field)
}
}
return nil
}
// ValidateRequiredFields validates the required fields.
func ValidateRequiredFields(obj *unstructured.Unstructured, fieldsList [][]string) error |
var (
logDstEx = regexp.MustCompile(`(?:syslog:server=((?:\d{1,3}\.){3}\d{1,3}|localhost|[a-zA-Z0-9._-]+):\d{1,5})|stderr|(?:\/[\S]+)+`)
logDstFileEx = regexp.MustCompile(`(?:\/[\S]+)+`)
logDstFQDNEx = regexp.MustCompile(`(?:[a-zA-Z0-9_-]+\.)+[a-zA-Z0-9_-]+`)
)
// ValidateAppProtectLogDestination validates destination for log configuration
func ValidateAppProtectLogDestination(dstAntn string) error {
errormsg := "Error parsing App Protect Log config: Destination must follow format: syslog:server=<ip-address | localhost>:<port> or fqdn or stderr or absolute path to file"
if !logDstEx.MatchString(dstAntn) {
return fmt.Errorf("%s Log Destination did not follow format", errormsg)
}
if dstAntn == "stderr" {
return nil
}
if logDstFileEx.MatchString(dstAntn) {
return nil
}
dstchunks := strings.Split(dstAntn, ":")
// This error can be ignored since the regex check ensures this string will be parsable
port, _ := strconv.Atoi(dstchunks[2])
if port > 65535 || port < 1 {
return fmt.Errorf("Error parsing port: %v not a valid port number", port)
}
ipstr := strings.Split(dstchunks[1], "=")[1]
if ipstr == "localhost" {
return nil
}
if logDstFQDNEx.MatchString(ipstr) {
return nil
}
if net.ParseIP(ipstr) == nil {
return fmt.Errorf("Error parsing host: %v is not a valid ip address or host name", ipstr)
}
return nil
}
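// Examples of destinations accepted by ValidateAppProtectLogDestination above:
//
// syslog:server=127.0.0.1:514
// syslog:server=localhost:514
// syslog:server=logs.example.com:514
// stderr
// /var/log/app_protect.log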
| {
for _, fields := range fieldsList {
field, found, err := unstructured.NestedMap(obj.Object, fields...)
if err != nil {
return fmt.Errorf("Error checking for required field %v: %w", field, err)
}
if !found {
return fmt.Errorf("Required field %v not found", field)
}
}
return nil
} |
cert.go | /*
Copyright IBM Corp. 2017 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package msp
import (
"bytes"
"crypto/ecdsa"
"crypto/x509/pkix"
"encoding/asn1"
"encoding/pem"
"fmt"
"math/big"
"time"
"github.com/hyperledger/fabric/bccsp/gm"
"github.com/pkg/errors"
"github.com/tjfoc/gmsm/sm2"
)
type validity struct {
NotBefore, NotAfter time.Time
}
type publicKeyInfo struct {
Raw asn1.RawContent
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
type certificate struct {
Raw asn1.RawContent
TBSCertificate tbsCertificate
SignatureAlgorithm pkix.AlgorithmIdentifier
SignatureValue asn1.BitString
}
type tbsCertificate struct {
Raw asn1.RawContent
Version int `asn1:"optional,explicit,default:0,tag:0"`
SerialNumber *big.Int
SignatureAlgorithm pkix.AlgorithmIdentifier
Issuer asn1.RawValue
Validity validity
Subject asn1.RawValue
PublicKey publicKeyInfo
UniqueId asn1.BitString `asn1:"optional,tag:1"`
SubjectUniqueId asn1.BitString `asn1:"optional,tag:2"`
Extensions []pkix.Extension `asn1:"optional,explicit,tag:3"`
}
// func isECDSASignedCert(cert *x509.Certificate) bool {
// return cert.SignatureAlgorithm == x509.ECDSAWithSHA1 ||
// cert.SignatureAlgorithm == x509.ECDSAWithSHA256 ||
// cert.SignatureAlgorithm == x509.ECDSAWithSHA384 ||
// cert.SignatureAlgorithm == x509.ECDSAWithSHA512
// }
func isECDSASignedCert(cert *sm2.Certificate) bool {
return cert.SignatureAlgorithm == sm2.ECDSAWithSHA1 ||
cert.SignatureAlgorithm == sm2.ECDSAWithSHA256 ||
cert.SignatureAlgorithm == sm2.ECDSAWithSHA384 ||
cert.SignatureAlgorithm == sm2.ECDSAWithSHA512
}
// sanitizeECDSASignedCert checks that the signature on a cert
// is in low-S form. This is checked against the public key of parentCert.
// If the signature is not in low-S form, then a new certificate is generated
// that is equal to cert except that its signature is in low-S form.
// func sanitizeECDSASignedCert(cert *x509.Certificate, parentCert *x509.Certificate) (*x509.Certificate, error) {
func sanitizeECDSASignedCert(cert *sm2.Certificate, parentCert *sm2.Certificate) (*sm2.Certificate, error) {
if cert == nil {
return nil, errors.New("certificate must be different from nil")
}
if parentCert == nil {
return nil, errors.New("parent certificate must be different from nil")
}
// expectedSig, err := utils.SignatureToLowS(parentCert.PublicKey.(*ecdsa.PublicKey), cert.Signature)
expectedSig, err := gm.SignatureToLowS(parentCert.PublicKey.(*ecdsa.PublicKey), cert.Signature)
if err != nil {
return nil, err
}
// if sig == cert.Signature, nothing needs to be done
if bytes.Equal(cert.Signature, expectedSig) {
return cert, nil
}
// otherwise create a new certificate with the new signature
// 1. Unmarshal cert.Raw to get an instance of certificate,
// the lower level interface that represent an x509 certificate
// encoding
var newCert certificate
// newCert, err = certFromX509Cert(cert)
newCert, err = certFromSM2Cert(cert)
if err != nil {
return nil, err
}
// 2. Change the signature
newCert.SignatureValue = asn1.BitString{Bytes: expectedSig, BitLength: len(expectedSig) * 8}
// 3. marshal again newCert. Raw must be nil
newCert.Raw = nil
newRaw, err := asn1.Marshal(newCert)
if err != nil {
return nil, errors.Wrap(err, "marshalling of the certificate failed")
}
// 4. parse newRaw to get an x509 certificate
// return x509.ParseCertificate(newRaw)
return sm2.ParseCertificate(newRaw)
}
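// Note on low-S: for an ECDSA signature (r, s) over a curve with group order N,
// both s and N-s verify against the same message and key, so enforcing
// s <= N/2 ("low-S") makes the encoding canonical and prevents signature
// malleability. That is why a non-matching signature is re-encoded above
// rather than rejected.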
// func certFromX509Cert(cert *x509.Certificate) (certificate, error) {
// var newCert certificate
// _, err := asn1.Unmarshal(cert.Raw, &newCert)
// if err != nil {
// return certificate{}, err
// }
// return newCert, nil
// }
func certFromSM2Cert(cert *sm2.Certificate) (certificate, error) {
var newCert certificate
_, err := asn1.Unmarshal(cert.Raw, &newCert)
if err != nil |
return newCert, nil
}
// String returns a PEM representation of a certificate
func (c certificate) String() string {
b, err := asn1.Marshal(c)
if err != nil {
return fmt.Sprintf("Failed marshaling cert: %v", err)
}
block := &pem.Block{
Bytes: b,
Type: "CERTIFICATE",
}
b = pem.EncodeToMemory(block)
return string(b)
}
// certToPEM converts the given x509.Certificate to a PEM
// encoded string
// func certToPEM(certificate *x509.Certificate) string {
func certToPEM(certificate *sm2.Certificate) string {
// cert, err := certFromX509Cert(certificate)
cert, err := certFromSM2Cert(certificate)
if err != nil {
mspIdentityLogger.Warning("Failed converting certificate to asn1", err)
return ""
}
return cert.String()
}
| {
return certificate{}, err
} |
ParametricGeometries.js | //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// WARNING: This file was auto-generated, any change will be overridden in next release. Please use configs/es6.conf.js then run "npm run convert". //
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
import { ArrowHelper } from '../helpers/ArrowHelper.js'
import { Curve } from '../curves/Curve.js'
import { Geometry } from '../core/Geometry.js'
import { Object3D } from '../core/Object3D.js'
import { ParametricGeometry } from './ParametricGeometry.js'
import { Vector3 } from '../math/Vector3.js'
/**
* @author zz85
*
* Experimenting of primitive geometry creation using Surface Parametric equations
*
*/
var ParametricGeometries = {
klein: function ( v, u, target ) {
u *= Math.PI;
v *= 2 * Math.PI;
u = u * 2;
var x, y, z;
if ( u < Math.PI ) {
x = 3 * Math.cos( u ) * ( 1 + Math.sin( u ) ) + ( 2 * ( 1 - Math.cos( u ) / 2 ) ) * Math.cos( u ) * Math.cos( v );
z = - 8 * Math.sin( u ) - 2 * ( 1 - Math.cos( u ) / 2 ) * Math.sin( u ) * Math.cos( v );
} else {
x = 3 * Math.cos( u ) * ( 1 + Math.sin( u ) ) + ( 2 * ( 1 - Math.cos( u ) / 2 ) ) * Math.cos( v + Math.PI );
z = - 8 * Math.sin( u );
}
y = - 2 * ( 1 - Math.cos( u ) / 2 ) * Math.sin( v );
target.set( x, y, z );
},
plane: function ( width, height ) {
return function ( u, v, target ) {
var x = u * width;
var y = 0;
var z = v * height;
target.set( x, y, z );
};
},
mobius: function ( u, t, target ) {
// flat mobius strip
// http://www.wolframalpha.com/input/?i=M%C3%B6bius+strip+parametric+equations&lk=1&a=ClashPrefs_*Surface.MoebiusStrip.SurfaceProperty.ParametricEquations-
u = u - 0.5;
var v = 2 * Math.PI * t;
var x, y, z;
var a = 2;
x = Math.cos( v ) * ( a + u * Math.cos( v / 2 ) );
y = Math.sin( v ) * ( a + u * Math.cos( v / 2 ) );
z = u * Math.sin( v / 2 );
target.set( x, y, z );
},
mobius3d: function ( u, t, target ) {
// volumetric mobius strip
u *= Math.PI;
t *= 2 * Math.PI;
u = u * 2;
var phi = u / 2;
var major = 2.25, a = 0.125, b = 0.65;
var x, y, z;
x = a * Math.cos( t ) * Math.cos( phi ) - b * Math.sin( t ) * Math.sin( phi );
z = a * Math.cos( t ) * Math.sin( phi ) + b * Math.sin( t ) * Math.cos( phi );
y = ( major + x ) * Math.sin( u );
x = ( major + x ) * Math.cos( u );
target.set( x, y, z );
}
};
/*********************************************
*
* Parametric Replacement for TubeGeometry
*
*********************************************/
ParametricGeometries.TubeGeometry = function ( path, segments, radius, segmentsRadius, closed, debug ) {
this.path = path;
this.segments = segments || 64;
this.radius = radius || 1;
this.segmentsRadius = segmentsRadius || 8;
this.closed = closed || false;
if ( debug ) this.debug = new Object3D();
var scope = this, numpoints = this.segments + 1;
var frames = path.computeFrenetFrames( segments, closed ),
tangents = frames.tangents,
normals = frames.normals,
binormals = frames.binormals;
// proxy internals
this.tangents = tangents;
this.normals = normals;
this.binormals = binormals;
var ParametricTube = function ( u, v, target ) {
v *= 2 * Math.PI;
var i = u * ( numpoints - 1 );
i = Math.floor( i );
var pos = path.getPointAt( u );
var tangent = tangents[ i ];
var normal = normals[ i ];
var binormal = binormals[ i ];
if ( scope.debug ) {
scope.debug.add( new ArrowHelper( tangent, pos, radius, 0x0000ff ) );
scope.debug.add( new ArrowHelper( normal, pos, radius, 0xff0000 ) );
scope.debug.add( new ArrowHelper( binormal, pos, radius, 0x00ff00 ) );
}
var cx = - scope.radius * Math.cos( v ); // TODO: Hack: Negating it so it faces outside.
var cy = scope.radius * Math.sin( v );
pos.x += cx * normal.x + cy * binormal.x;
pos.y += cx * normal.y + cy * binormal.y;
pos.z += cx * normal.z + cy * binormal.z;
target.copy( pos );
};
ParametricGeometry.call( this, ParametricTube, segments, segmentsRadius );
};
ParametricGeometries.TubeGeometry.prototype = Object.create( Geometry.prototype );
ParametricGeometries.TubeGeometry.prototype.constructor = ParametricGeometries.TubeGeometry;
/*********************************************
*
* Parametric Replacement for TorusKnotGeometry
*
*********************************************/
ParametricGeometries.TorusKnotGeometry = function ( radius, tube, segmentsT, segmentsR, p, q ) {
this.radius = radius || 200;
this.tube = tube || 40;
this.segmentsT = segmentsT || 64;
this.segmentsR = segmentsR || 8;
this.p = p || 2;
this.q = q || 3;
function | () {
Curve.call( this );
}
TorusKnotCurve.prototype = Object.create( Curve.prototype );
TorusKnotCurve.prototype.constructor = TorusKnotCurve;
TorusKnotCurve.prototype.getPoint = function ( t, optionalTarget ) {
var point = optionalTarget || new Vector3();
t *= Math.PI * 2;
var r = 0.5;
var x = ( 1 + r * Math.cos( q * t ) ) * Math.cos( p * t );
var y = ( 1 + r * Math.cos( q * t ) ) * Math.sin( p * t );
var z = r * Math.sin( q * t );
return point.set( x, y, z ).multiplyScalar( radius );
};
var segments = segmentsT;
var radiusSegments = segmentsR;
var extrudePath = new TorusKnotCurve();
ParametricGeometries.TubeGeometry.call( this, extrudePath, segments, tube, radiusSegments, true, false );
};
ParametricGeometries.TorusKnotGeometry.prototype = Object.create( Geometry.prototype );
ParametricGeometries.TorusKnotGeometry.prototype.constructor = ParametricGeometries.TorusKnotGeometry;
/*********************************************
*
* Parametric Replacement for SphereGeometry
*
*********************************************/
ParametricGeometries.SphereGeometry = function ( size, u, v ) {
function sphere( u, v, target ) {
u *= Math.PI;
v *= 2 * Math.PI;
var x = size * Math.sin( u ) * Math.cos( v );
var y = size * Math.sin( u ) * Math.sin( v );
var z = size * Math.cos( u );
target.set( x, y, z );
}
ParametricGeometry.call( this, sphere, u, v );
};
ParametricGeometries.SphereGeometry.prototype = Object.create( Geometry.prototype );
ParametricGeometries.SphereGeometry.prototype.constructor = ParametricGeometries.SphereGeometry;
/*********************************************
*
* Parametric Replacement for PlaneGeometry
*
*********************************************/
ParametricGeometries.PlaneGeometry = function ( width, depth, segmentsWidth, segmentsDepth ) {
function plane( u, v, target ) {
var x = u * width;
var y = 0;
var z = v * depth;
target.set( x, y, z );
}
ParametricGeometry.call( this, plane, segmentsWidth, segmentsDepth );
};
ParametricGeometries.PlaneGeometry.prototype = Object.create( Geometry.prototype );
ParametricGeometries.PlaneGeometry.prototype.constructor = ParametricGeometries.PlaneGeometry;
export { ParametricGeometries }
| TorusKnotCurve |
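// Illustrative usage sketch (assumes a three.js scene and material are already
// set up; the names here are examples, not part of this file):
//
// var geometry = new ParametricGeometries.SphereGeometry( 10, 32, 16 );
// scene.add( new THREE.Mesh( geometry, material ) );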
data_type.go | package common_data_type
type Page struct {
Page float64 `form:"page" json:"page" binding:"min=1"` // Required; page number must be >= 1
Limit float64 `form:"limit" json:"limit" binding:"min=1"` // Required; items per page must be >= 1 | }
|
readlogin.pb.go | // Code generated by protoc-gen-go.
// source: github.com/hailocab/h2/proto/login/proto/readlogin/readlogin.proto
// DO NOT EDIT!
/*
Package com_hailocab_service_login_readlogin is a generated protocol buffer package.
It is generated from these files:
github.com/hailocab/h2/proto/login/proto/readlogin/readlogin.proto
It has these top-level messages:
Request
Response
Login
*/
package com_hailocab_service_login_readlogin
import proto "github.com/hailocab/protobuf/proto"
import json "encoding/json"
import math "math"
import com_hailocab_service_login "github.com/hailocab/h2/proto/login/proto"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = &json.SyntaxError{}
var _ = math.Inf
type Request struct {
Application *string `protobuf:"bytes,1,req,name=application" json:"application,omitempty"`
Uid *string `protobuf:"bytes,2,req,name=uid" json:"uid,omitempty"`
RangeStart *int64 `protobuf:"varint,3,opt,name=rangeStart" json:"rangeStart,omitempty"`
RangeEnd *int64 `protobuf:"varint,4,opt,name=rangeEnd" json:"rangeEnd,omitempty"`
LastId *string `protobuf:"bytes,5,opt,name=lastId" json:"lastId,omitempty"`
Count *int32 `protobuf:"varint,6,opt,name=count,def=10" json:"count,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Request) Reset() { *m = Request{} }
func (m *Request) String() string { return proto.CompactTextString(m) }
func (*Request) ProtoMessage() {}
const Default_Request_Count int32 = 10
func (m *Request) GetApplication() string {
if m != nil && m.Application != nil {
return *m.Application
}
return ""
}
func (m *Request) GetUid() string {
if m != nil && m.Uid != nil {
return *m.Uid
}
return ""
}
func (m *Request) GetRangeStart() int64 {
if m != nil && m.RangeStart != nil {
return *m.RangeStart
}
return 0
}
func (m *Request) GetRangeEnd() int64 {
if m != nil && m.RangeEnd != nil {
return *m.RangeEnd
}
return 0
}
func (m *Request) GetLastId() string {
if m != nil && m.LastId != nil {
return *m.LastId
}
return ""
}
func (m *Request) GetCount() int32 {
if m != nil && m.Count != nil {
return *m.Count
}
return Default_Request_Count
}
type Response struct {
Login []*Login `protobuf:"bytes,1,rep,name=login" json:"login,omitempty"`
LastId *string `protobuf:"bytes,2,opt,name=lastId" json:"lastId,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Response) Reset() { *m = Response{} }
func (m *Response) String() string { return proto.CompactTextString(m) }
func (*Response) ProtoMessage() {}
func (m *Response) GetLogin() []*Login {
if m != nil {
return m.Login
}
return nil
}
func (m *Response) GetLastId() string {
if m != nil && m.LastId != nil {
return *m.LastId
}
return ""
}
type Login struct {
Application *string `protobuf:"bytes,1,opt,name=application" json:"application,omitempty"`
Uid *string `protobuf:"bytes,2,opt,name=uid" json:"uid,omitempty"`
LoggedInTimestamp *int64 `protobuf:"varint,3,opt,name=loggedInTimestamp" json:"loggedInTimestamp,omitempty"`
Mech *string `protobuf:"bytes,4,opt,name=mech" json:"mech,omitempty"`
DeviceType *string `protobuf:"bytes,5,opt,name=deviceType" json:"deviceType,omitempty"`
Meta []*com_hailocab_service_login.KeyValue `protobuf:"bytes,6,rep,name=meta" json:"meta,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Login) Reset() { *m = Login{} }
func (m *Login) String() string { return proto.CompactTextString(m) }
func (*Login) ProtoMessage() {}
func (m *Login) GetApplication() string {
if m != nil && m.Application != nil {
return *m.Application
}
return ""
}
func (m *Login) GetUid() string {
if m != nil && m.Uid != nil {
return *m.Uid
}
return ""
}
func (m *Login) GetLoggedInTimestamp() int64 {
if m != nil && m.LoggedInTimestamp != nil {
return *m.LoggedInTimestamp
}
return 0
}
func (m *Login) GetMech() string {
if m != nil && m.Mech != nil {
return *m.Mech
}
return ""
}
func (m *Login) GetDeviceType() string {
if m != nil && m.DeviceType != nil |
return ""
}
func (m *Login) GetMeta() []*com_hailocab_service_login.KeyValue {
if m != nil {
return m.Meta
}
return nil
}
func init() {
}
| {
return *m.DeviceType
} |
ssh_conn.go | /*
Copyright 2020 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kic
import (
"fmt"
"os"
"os/exec"
"runtime"
"github.com/phayes/freeport"
v1 "k8s.io/api/core/v1"
"k8s.io/minikube/pkg/minikube/out"
"k8s.io/minikube/pkg/minikube/style"
)
type sshConn struct {
name string
service string
cmd *exec.Cmd
ports []int
activeConn bool
suppressStdOut bool
}
func createSSHConn(name, sshPort, sshKey string, resourcePorts []int32, resourceIP string, resourceName string) *sshConn {
// extract sshArgs
sshArgs := []string{
// TODO: document the options here
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-N",
"[email protected]",
"-p", sshPort,
"-i", sshKey,
}
askForSudo := false
var privilegedPorts []int32
for _, port := range resourcePorts {
arg := fmt.Sprintf(
"-L %d:%s:%d",
port,
resourceIP,
port,
)
// check if any port is privileged
if port < 1024 {
privilegedPorts = append(privilegedPorts, port)
askForSudo = true
}
sshArgs = append(sshArgs, arg)
}
command := "ssh"
if askForSudo && runtime.GOOS != "windows" {
out.Styled(
style.Warning,
"The service/ingress {{.resource}} requires privileged ports to be exposed: {{.ports}}",
out.V{"resource": resourceName, "ports": fmt.Sprintf("%v", privilegedPorts)},
)
out.Styled(style.Permissions, "sudo permission will be asked for it.")
command = "sudo"
sshArgs = append([]string{"ssh"}, sshArgs...)
}
if askForSudo && runtime.GOOS == "windows" {
out.WarningT("Access to ports below 1024 may fail on Windows with OpenSSH clients older than v8.1. For more information, see: https://minikube.sigs.k8s.io/docs/handbook/accessing/#access-to-ports-1024-on-windows-requires-root-permission")
}
cmd := exec.Command(command, sshArgs...)
return &sshConn{
name: name,
service: resourceName,
cmd: cmd,
activeConn: false,
}
}
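// Illustrative example (assumed port numbers and key path) of the command
// createSSHConn builds for a service exposing port 8080:
//
// ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -N \
//     docker@127.0.0.1 -p 32772 -i /home/user/.minikube/machines/minikube/id_rsa \
//     -L 8080:10.96.0.5:8080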
func createSSHConnWithRandomPorts(name, sshPort, sshKey string, svc *v1.Service) (*sshConn, error) {
// extract sshArgs
sshArgs := []string{
// TODO: document the options here
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-N",
"[email protected]",
"-p", sshPort,
"-i", sshKey,
}
usedPorts := make([]int, 0, len(svc.Spec.Ports))
for _, port := range svc.Spec.Ports {
freeport, err := freeport.GetFreePort()
if err != nil {
return nil, err
}
arg := fmt.Sprintf(
"-L %d:%s:%d",
freeport,
svc.Spec.ClusterIP,
port.Port,
)
sshArgs = append(sshArgs, arg)
usedPorts = append(usedPorts, freeport)
}
cmd := exec.Command("ssh", sshArgs...)
return &sshConn{
name: name,
service: svc.Name,
cmd: cmd,
ports: usedPorts,
activeConn: false,
}, nil
}
func (c *sshConn) startAndWait() error {
if !c.suppressStdOut {
out.Step(style.Running, "Starting tunnel for service {{.service}}.", out.V{"service": c.service})
}
err := c.cmd.Start()
if err != nil |
c.activeConn = true
// we ignore wait error because the process will be killed
_ = c.cmd.Wait()
// Wait is finished for connection, mark false.
c.activeConn = false
return nil
}
func (c *sshConn) stop() error {
if c.activeConn {
c.activeConn = false
if !c.suppressStdOut {
out.Step(style.Stopping, "Stopping tunnel for service {{.service}}.", out.V{"service": c.service})
}
err := c.cmd.Process.Kill()
if err == os.ErrProcessDone {
// No need to return an error here
return nil
}
return err
}
if !c.suppressStdOut {
out.Step(style.Stopping, "Stopped tunnel for service {{.service}}.", out.V{"service": c.service})
}
return nil
}
| {
return err
} |
main.go | /*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"flag"
"os"
"k8s.io/apimachinery/pkg/runtime"
clientgoscheme "k8s.io/client-go/kubernetes/scheme"
_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/log/zap"
alibabacloudv1alpha1 "github.com/zzxwill/aliyun-ecs-k8s-operator/api/v1alpha1"
"github.com/zzxwill/aliyun-ecs-k8s-operator/controllers"
// +kubebuilder:scaffold:imports
)
var (
scheme = runtime.NewScheme()
setupLog = ctrl.Log.WithName("setup")
)
func | () {
_ = clientgoscheme.AddToScheme(scheme)
_ = alibabacloudv1alpha1.AddToScheme(scheme)
// +kubebuilder:scaffold:scheme
}
func main() {
var metricsAddr string
var enableLeaderElection bool
flag.StringVar(&metricsAddr, "metrics-addr", ":8080", "The address the metric endpoint binds to.")
flag.BoolVar(&enableLeaderElection, "enable-leader-election", false,
"Enable leader election for controller manager. "+
"Enabling this will ensure there is only one active controller manager.")
flag.Parse()
ctrl.SetLogger(zap.New(zap.UseDevMode(true)))
mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{
Scheme: scheme,
MetricsBindAddress: metricsAddr,
Port: 9443,
LeaderElection: enableLeaderElection,
LeaderElectionID: "8b246119.zhouzhengxi.com",
})
if err != nil {
setupLog.Error(err, "unable to start manager")
os.Exit(1)
}
if err = (&controllers.ECSInstanceReconciler{
Client: mgr.GetClient(),
Log: ctrl.Log.WithName("controllers").WithName("ECSInstance"),
Scheme: mgr.GetScheme(),
}).SetupWithManager(mgr); err != nil {
setupLog.Error(err, "unable to create controller", "controller", "ECSInstance")
os.Exit(1)
}
// +kubebuilder:scaffold:builder
setupLog.Info("starting manager")
if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
setupLog.Error(err, "problem running manager")
os.Exit(1)
}
}
| init |
writer.go | // Copyright 2016, Google
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package b2
import (
"errors"
"fmt"
"io"
"sync"
"sync/atomic"
"time"
"github.com/kurin/blazer/internal/blog"
"golang.org/x/net/context"
)
// Writer writes data into Backblaze. It automatically switches to the large
// file API if the file exceeds ChunkSize bytes. Due to that and other
// Backblaze API details, there is a large buffer.
//
// Changes to public Writer attributes must be made before the first call to
// Write.
type Writer struct {
	// ConcurrentUploads is the number of different threads sending data concurrently
// to Backblaze for large files. This can increase performance greatly, as
// each thread will hit a different endpoint. However, there is a ChunkSize
// buffer for each thread. Values less than 1 are equivalent to 1.
ConcurrentUploads int
// Resume an upload. If true, and the upload is a large file, and a file of
// the same name was started but not finished, then assume that we are
// resuming that file, and don't upload duplicate chunks.
Resume bool
// ChunkSize is the size, in bytes, of each individual part, when writing
// large files, and also when determining whether to upload a file normally
// or when to split it into parts. The default is 100M (1e8) (which is also
// the minimum). Values less than 100M are not an error, but will fail. The
// maximum is 5GB (5e9).
ChunkSize int
// UseFileBuffer controls whether to use an in-memory buffer (the default) or
// scratch space on the file system. If this is true, b2 will save chunks in
// FileBufferDir.
UseFileBuffer bool
// FileBufferDir specifies the directory where scratch files are kept. If
// blank, os.TempDir() is used.
FileBufferDir string
contentType string
info map[string]string
csize int
ctx context.Context
cancel context.CancelFunc
ready chan chunk
wg sync.WaitGroup
start sync.Once
once sync.Once
done sync.Once
file beLargeFileInterface
seen map[int]string
o *Object
name string
cidx int
w writeBuffer
emux sync.RWMutex
err error
smux sync.RWMutex
smap map[int]*meteredReader
}
type chunk struct {
id int
buf writeBuffer
}
func (w *Writer) getBuffer() (writeBuffer, error) {
if !w.UseFileBuffer {
return newMemoryBuffer(), nil
}
return newFileBuffer(w.FileBufferDir)
}
func (w *Writer) setErr(err error) {
if err == nil {
return
}
w.emux.Lock()
defer w.emux.Unlock()
if w.err == nil {
blog.V(0).Infof("error writing %s: %v", w.name, err)
w.err = err
w.cancel()
}
}
func (w *Writer) getErr() error {
w.emux.RLock()
defer w.emux.RUnlock()
return w.err
}
func (w *Writer) registerChunk(id int, r *meteredReader) {
w.smux.Lock()
w.smap[id] = r
w.smux.Unlock()
}
func (w *Writer) completeChunk(id int) {
w.smux.Lock()
w.smap[id] = nil
w.smux.Unlock()
}
var gid int32
func (w *Writer) thread() {
go func() {
id := atomic.AddInt32(&gid, 1)
fc, err := w.file.getUploadPartURL(w.ctx)
if err != nil {
w.setErr(err)
return
}
w.wg.Add(1)
defer w.wg.Done()
for {
chunk, ok := <-w.ready
if !ok {
return
}
if sha, ok := w.seen[chunk.id]; ok {
if sha != chunk.buf.Hash() {
w.setErr(errors.New("resumable upload was requested, but chunks don't match!"))
return
}
chunk.buf.Close()
w.completeChunk(chunk.id)
blog.V(2).Infof("skipping chunk %d", chunk.id)
continue
}
blog.V(2).Infof("thread %d handling chunk %d", id, chunk.id)
r, err := chunk.buf.Reader()
if err != nil {
w.setErr(err)
return
}
mr := &meteredReader{r: r, size: chunk.buf.Len()}
w.registerChunk(chunk.id, mr)
sleep := time.Millisecond * 15
redo:
n, err := fc.uploadPart(w.ctx, mr, chunk.buf.Hash(), chunk.buf.Len(), chunk.id)
if n != chunk.buf.Len() || err != nil {
if w.o.b.r.reupload(err) {
time.Sleep(sleep)
sleep *= 2
if sleep > time.Second*15 {
sleep = time.Second * 15
}
blog.V(1).Infof("b2 writer: wrote %d of %d: error: %v; retrying", n, chunk.buf.Len(), err)
f, err := w.file.getUploadPartURL(w.ctx)
if err != nil {
w.setErr(err)
w.completeChunk(chunk.id)
chunk.buf.Close() // TODO: log error
return
}
fc = f
goto redo
}
w.setErr(err)
w.completeChunk(chunk.id)
chunk.buf.Close() // TODO: log error
return
}
w.completeChunk(chunk.id)
chunk.buf.Close() // TODO: log error
blog.V(2).Infof("chunk %d handled", chunk.id)
}
}()
}
// Write satisfies the io.Writer interface.
func (w *Writer) Write(p []byte) (int, error) {
w.start.Do(func() {
w.smux.Lock()
w.smap = make(map[int]*meteredReader)
w.smux.Unlock()
w.o.b.c.addWriter(w)
w.csize = w.ChunkSize
if w.csize == 0 {
w.csize = 1e8
}
v, err := w.getBuffer()
if err != nil {
w.setErr(err)
return
}
w.w = v
})
if err := w.getErr(); err != nil {
return 0, err
}
left := w.csize - w.w.Len()
if len(p) < left {
return w.w.Write(p)
}
i, err := w.w.Write(p[:left])
if err != nil {
w.setErr(err)
return i, err
}
if err := w.sendChunk(); err != nil {
w.setErr(err)
return i, w.getErr()
}
k, err := w.Write(p[left:])
if err != nil {
w.setErr(err)
}
return i + k, err
}
func (w *Writer) simpleWriteFile() error {
ue, err := w.o.b.b.getUploadURL(w.ctx)
if err != nil {
return err
}
sha1 := w.w.Hash()
ctype := w.contentType
if ctype == "" {
ctype = "application/octet-stream"
}
r, err := w.w.Reader()
if err != nil {
return err
}
mr := &meteredReader{r: r, size: w.w.Len()}
w.registerChunk(1, mr)
defer w.completeChunk(1)
redo:
f, err := ue.uploadFile(w.ctx, mr, int(w.w.Len()), w.name, ctype, sha1, w.info)
if err != nil {
if w.o.b.r.reupload(err) {
blog.V(1).Infof("b2 writer: %v; retrying", err)
u, err := w.o.b.b.getUploadURL(w.ctx)
if err != nil {
return err
}
ue = u
goto redo
}
return err
}
w.o.f = f
return nil
}
func (w *Writer) getLargeFile() (beLargeFileInterface, error) {
if !w.Resume {
ctype := w.contentType
if ctype == "" {
ctype = "application/octet-stream"
}
return w.o.b.b.startLargeFile(w.ctx, w.name, ctype, w.info)
}
next := 1
seen := make(map[int]string)
var size int64
var fi beFileInterface
for {
cur := &Cursor{name: w.name}
objs, _, err := w.o.b.ListObjects(w.ctx, 1, cur)
if err != nil {
return nil, err
}
if len(objs) < 1 || objs[0].name != w.name {
w.Resume = false
return w.getLargeFile()
}
fi = objs[0].f
parts, n, err := fi.listParts(w.ctx, next, 100)
if err != nil {
return nil, err
}
next = n
for _, p := range parts {
seen[p.number()] = p.sha1()
size += p.size()
}
if len(parts) == 0 {
break
}
if next == 0 {
break
}
}
w.seen = make(map[int]string) // copy the map
for id, sha := range seen {
w.seen[id] = sha
}
return fi.compileParts(size, seen), nil
}
func (w *Writer) sendChunk() error {
var err error
w.once.Do(func() {
lf, e := w.getLargeFile()
if e != nil {
err = e
return
}
w.file = lf
w.ready = make(chan chunk)
if w.ConcurrentUploads < 1 {
w.ConcurrentUploads = 1
}
for i := 0; i < w.ConcurrentUploads; i++ {
w.thread()
}
})
if err != nil {
return err
}
select {
case w.ready <- chunk{
id: w.cidx + 1,
buf: w.w,
}:
case <-w.ctx.Done():
return w.ctx.Err()
}
w.cidx++
v, err := w.getBuffer()
if err != nil {
return err
}
w.w = v
return nil
}
// Close satisfies the io.Closer interface. It is critical to check the return
// value of Close on all writers.
func (w *Writer) Close() error {
w.done.Do(func() {
defer w.o.b.c.removeWriter(w)
if w.cidx == 0 {
w.setErr(w.simpleWriteFile())
return
}
if w.w.Len() > 0 {
if err := w.sendChunk(); err != nil {
w.setErr(err)
return
}
}
close(w.ready)
w.wg.Wait()
f, err := w.file.finishLargeFile(w.ctx)
if err != nil {
w.setErr(err)
return
}
w.w.Close() // TODO: log error
w.o.f = f
})
return w.getErr()
}
// WithAttrs sets the writable attributes of the resulting file to given
// values. WithAttrs must be called before the first call to Write.
func (w *Writer) WithAttrs(attrs *Attrs) *Writer {
w.contentType = attrs.ContentType
w.info = make(map[string]string)
for k, v := range attrs.Info {
w.info[k] = v
}
if len(w.info) < 10 && !attrs.LastModified.IsZero() {
w.info["src_last_modified_millis"] = fmt.Sprintf("%d", attrs.LastModified.UnixNano()/1e6)
}
return w
}
func (w *Writer) status() *WriterStatus {
w.smux.RLock()
defer w.smux.RUnlock()
ws := &WriterStatus{
Progress: make([]float64, len(w.smap)),
}
for i := 1; i <= len(w.smap); i++ {
ws.Progress[i-1] = w.smap[i].done()
}
return ws |
type meteredReader struct {
read int64
size int
r io.ReadSeeker
}
func (mr *meteredReader) Read(p []byte) (int, error) {
n, err := mr.r.Read(p)
atomic.AddInt64(&mr.read, int64(n))
return n, err
}
func (mr *meteredReader) Seek(offset int64, whence int) (int64, error) {
atomic.StoreInt64(&mr.read, offset)
return mr.r.Seek(offset, whence)
}
func (mr *meteredReader) done() float64 {
if mr == nil {
return 1
}
read := float64(atomic.LoadInt64(&mr.read))
return read / float64(mr.size)
} | } |
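// Illustrative usage sketch (assumes an *Object obtained from a bucket; the
// NewWriter constructor is assumed from the surrounding package):
//
// w := obj.NewWriter(ctx)
// w.ConcurrentUploads = 4
// if _, err := io.Copy(w, src); err != nil { /* handle error */ }
// if err := w.Close(); err != nil { /* Close errors must be checked */ }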
generateMarkdown.js | // Function that renders the license badge of selected inquirer prompt
const renderLicenseBadge = (license) => {
// If there is no license, return an empty string
if (license == "None") {
return "";
// Else return license badge that was selected during the inquirer prompt
} else if (license == "MIT") {
return ``
} else if (license == "Apache 2.0") {
return ``
} else if (license == "GPL 3.0") {
return ``
} else if (license == "BSD 3") {
return ``
}
}
// Function that renders clickable license link
const renderLicenseLink = (license) => {
// If there is no license, return an empty string
if (license == "None") {
return "";
// Else return the license link that was selected in inquirer prompt
} else if (license == "MIT") {
return `[${license}](https://opensource.org/licenses/MIT)`
} else if (license == "Apache 2.0") {
return `[${license}](https://opensource.org/licenses/Apache-2.0)`
} else if (license == "GPL 3.0") {
return `[${license}](https://www.gnu.org/licenses/gpl-3.0)`
} else if (license == "BSD 3") {
return `[${license}](https://opensource.org/licenses/BSD-3-Clause)`
}
}
// Function that will render a license section to the readme
function renderLicenseSection(license) {
// If there is no license, return an empty string
if (license == "None") {
return ""
// Else return license section
} else {
return `
## License
This project is licensed under the ${renderLicenseLink(license)} license. Follow the link for further information regarding this license.
---
`
}
}
// Function that creates the markdown format and inputs to be used in index.js
const generateMarkdown = ({ github, email, projectName, description, license, install, test, use, contribution}) => {
return `
# ${projectName}
${renderLicenseBadge(license)}
---
## Description
${description} |
---
## Table of Contents
* [Installation](#installation)
* [Usage](#usage)
* [License](#license)
* [Contribution](#contributions)
* [Tests](#tests)
* [Questions](#questions)
---
## Installation
To install the necessary dependencies run the following command:
\`\`\`\
${install}
\`\`\`\
---
## Usage
${use}
---
${renderLicenseSection(license)}
## Contributions
${contribution}
---
## Tests
To run tests, run the following command:
\`\`\`\
${test}
\`\`\`\
---
## Questions
For any questions, please contact us via email at ${email}, or GitHub at ${github}.
---
`;
}
// Exports file
module.exports = generateMarkdown; | |
troubleshoot_request_builder.go | package troubleshoot
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
)
// TroubleshootRequestBuilder provides operations to call the troubleshoot method.
type TroubleshootRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// TroubleshootRequestBuilderPostRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type TroubleshootRequestBuilderPostRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewTroubleshootRequestBuilderInternal instantiates a new TroubleshootRequestBuilder and sets the default values.
func NewTroubleshootRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*TroubleshootRequestBuilder) {
m := &TroubleshootRequestBuilder{
}
m.urlTemplate = "{+baseurl}/deviceManagement/virtualEndpoint/cloudPCs/{cloudPC%2Did}/microsoft.graph.troubleshoot";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
// NewTroubleshootRequestBuilder instantiates a new TroubleshootRequestBuilder and sets the default values.
func NewTroubleshootRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*TroubleshootRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewTroubleshootRequestBuilderInternal(urlParams, requestAdapter)
}
// CreatePostRequestInformation invoke action troubleshoot
func (m *TroubleshootRequestBuilder) CreatePostRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreatePostRequestInformationWithRequestConfiguration(nil);
}
// CreatePostRequestInformationWithRequestConfiguration invoke action troubleshoot
func (m *TroubleshootRequestBuilder) CreatePostRequestInformationWithRequestConfiguration(requestConfiguration *TroubleshootRequestBuilderPostRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) { | requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.POST
if requestConfiguration != nil {
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
}
return requestInfo, nil
}
// Post invoke action troubleshoot
func (m *TroubleshootRequestBuilder) Post()(error) {
return m.PostWithRequestConfigurationAndResponseHandler(nil, nil);
}
// PostWithRequestConfigurationAndResponseHandler invoke action troubleshoot
func (m *TroubleshootRequestBuilder) PostWithRequestConfigurationAndResponseHandler(requestConfiguration *TroubleshootRequestBuilderPostRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(error) {
requestInfo, err := m.CreatePostRequestInformationWithRequestConfiguration(requestConfiguration);
if err != nil {
return err
}
err = m.requestAdapter.SendNoContentAsync(requestInfo, responseHandler, nil)
if err != nil {
return err
}
return nil
} | requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate |
term_bytes.rs | // This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
use std::option::Option;
use std::boxed::Box;
use std::io::Result;
use std::io::Cursor;
use std::vec::Vec;
use std::default::Default;
use kaitai_struct::KaitaiStream;
use kaitai_struct::KaitaiStruct;
#[derive(Default)]
pub struct TermBytes {
pub s1: Vec<u8>,
pub s2: Vec<u8>,
pub s3: Vec<u8>,
}
impl KaitaiStruct for TermBytes {
fn new<S: KaitaiStream>(stream: &mut S,
_parent: &Option<Box<KaitaiStruct>>,
_root: &Option<Box<KaitaiStruct>>)
-> Result<Self>
where Self: Sized {
let mut s: Self = Default::default();
// TermBytes has no stream field; the stream is passed straight through to read().
s.read(stream, _parent, _root)?;
fn read<S: KaitaiStream>(&mut self,
stream: &mut S,
_parent: &Option<Box<KaitaiStruct>>,
_root: &Option<Box<KaitaiStruct>>)
-> Result<()>
where Self: Sized {
// read_bytes_term(terminator, include_term, consume_term, err_on_eof):
// s1 reads up to '|' (0x7C) and consumes the terminator, s2 leaves it in the
// stream, s3 reads up to '@' (0x40) and keeps the terminator in the result.
self.s1 = stream.read_bytes_term(124, false, true, true)?;
self.s2 = stream.read_bytes_term(124, false, false, true)?;
self.s3 = stream.read_bytes_term(64, true, true, true)?;
Ok(())
}
}
impl TermBytes {
} |
Ok(s)
} |
complicated_code.py | def main(important_parameter, ignored_parameter):
|
if __name__ == "__main__":
#need like
#300+ lines of code
#to give you the Answer to everything
#i think that's fair
f = main(1, 2)
while hasattr(f, '__call__'):
f = f()
| """
:return: The answer to everything
"""
important_field = important_parameter # this way the parameter was actually used, hence making it important.
def realmain():
def actualrealrealmain():
def nownotevenkiddingtherealfunction():
print "The answer to everything"
# a
# comment
return nownotevenkiddingtherealfunction
#every
#now
return actualrealrealmain
#and
#then
return realmain |
__init__.py | from discord.ext import commands
from .root import attach_root
from .base import attach_base
from .system import attach_system
from .region import attach_region
from .friend import attach_friend
from .enemy import attach_enemy
from .unrecognized import attach_unrecognized
from .forum import attach_forum
def | (bot, storage, chanell_controller) -> commands.Bot:
bot = attach_root(bot, storage, chanell_controller)
bot = attach_base(bot, storage)
bot = attach_system(bot, storage)
bot = attach_region(bot, storage)
bot = attach_friend(bot, storage)
bot = attach_enemy(bot, storage)
bot = attach_unrecognized(bot, storage)
bot = attach_forum(bot, storage)
return bot
| attach_commands |
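# Illustrative wiring sketch (assumes a discord.py bot plus storage and channel
# controller objects created elsewhere):
#
# bot = commands.Bot(command_prefix="!")
# bot = attach_commands(bot, storage, chanell_controller)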
a1_controller_delete_policy_instance.go | /*
==================================================================================
Copyright (c) 2021 Samsung
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This source code is part of the near-RT RIC (RAN Intelligent Controller)
platform project (RICP).
==================================================================================
*/
// Code generated by go-swagger; DO NOT EDIT.
package a1_mediator
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
"github.com/go-openapi/runtime/middleware"
)
// A1ControllerDeletePolicyInstanceHandlerFunc turns a function with the right signature into a a1 controller delete policy instance handler
type A1ControllerDeletePolicyInstanceHandlerFunc func(A1ControllerDeletePolicyInstanceParams) middleware.Responder
// Handle executing the request and returning a response
func (fn A1ControllerDeletePolicyInstanceHandlerFunc) Handle(params A1ControllerDeletePolicyInstanceParams) middleware.Responder {
return fn(params)
}
// A1ControllerDeletePolicyInstanceHandler interface for that can handle valid a1 controller delete policy instance params
type A1ControllerDeletePolicyInstanceHandler interface {
Handle(A1ControllerDeletePolicyInstanceParams) middleware.Responder
}
// NewA1ControllerDeletePolicyInstance creates a new http.Handler for the a1 controller delete policy instance operation
func | (ctx *middleware.Context, handler A1ControllerDeletePolicyInstanceHandler) *A1ControllerDeletePolicyInstance {
return &A1ControllerDeletePolicyInstance{Context: ctx, Handler: handler}
}
/*A1ControllerDeletePolicyInstance swagger:route DELETE /a1-p/policytypes/{policy_type_id}/policies/{policy_instance_id} A1 Mediator a1ControllerDeletePolicyInstance
Delete this policy instance
*/
type A1ControllerDeletePolicyInstance struct {
Context *middleware.Context
Handler A1ControllerDeletePolicyInstanceHandler
}
func (o *A1ControllerDeletePolicyInstance) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
route, rCtx, _ := o.Context.RouteInfo(r)
if rCtx != nil {
r = rCtx
}
var Params = NewA1ControllerDeletePolicyInstanceParams()
if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
o.Context.Respond(rw, r, route.Produces, route, err)
return
}
res := o.Handler.Handle(Params) // actually handle the request
o.Context.Respond(rw, r, route.Produces, route, res)
}
| NewA1ControllerDeletePolicyInstance |
test_project.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from polyaxon_sdk import V1Project
from tests.utils import BaseTestCase
from polyaxon.managers.project import ProjectManager
@pytest.mark.managers_mark
class TestProjectManager(BaseTestCase):
def test_default_props(self):
assert ProjectManager.is_all_visibility() is True
assert ProjectManager.IS_POLYAXON_DIR is True
assert ProjectManager.CONFIG_FILE_NAME == ".project"
assert ProjectManager.CONFIG == V1Project | # You may obtain a copy of the License at
# |
example.rs | fn is_prime(n: u32) -> bool {
let mut i: u32 = 3;
while (i * i) < (n + 1) {
if n % i == 0 {
return false;
}
i += 1;
}
return true;
}
pub fn nth(n: u32) -> Option<u32> { | 0 => None,
1 => Some(2),
_ => {
let mut count: u32 = 1;
let mut candidate: u32 = 1;
while count < n {
candidate += 2;
if is_prime(candidate) {
count += 1;
}
}
Some(candidate)
}
}
} | match n { |
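// Illustrative expectations for nth above (1-indexed, per the match arms):
//
// assert_eq!(nth(1), Some(2));
// assert_eq!(nth(2), Some(3));
// assert_eq!(nth(6), Some(13));
// assert_eq!(nth(0), None);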
enums_2.js | var searchData= | ['sfx_5fchorus_5fparam',['SFX_CHORUS_PARAM',['../namespace_l_i_d_l.html#a1ec80fcd944bc6f6ff4548c0b2c8d6e1',1,'LIDL']]],
['sfx_5fcompressor_5fparam',['SFX_COMPRESSOR_PARAM',['../namespace_l_i_d_l.html#a4f2a33951eec8b581a3e9b71193848e1',1,'LIDL']]],
['sfx_5fdist_5fparam',['SFX_DIST_PARAM',['../namespace_l_i_d_l.html#af922c0ec8e5e44002b6493d032d727cf',1,'LIDL']]],
['sfx_5fecho_5fparam',['SFX_ECHO_PARAM',['../namespace_l_i_d_l.html#a83f4086bbb41ac05b4563a051359eab7',1,'LIDL']]],
['sfx_5fflanger_5fparam',['SFX_FLANGER_PARAM',['../namespace_l_i_d_l.html#a70d8717e6578321f7d70f4951e3fe3cf',1,'LIDL']]],
['sfx_5fgargle_5fparam',['SFX_GARGLE_PARAM',['../namespace_l_i_d_l.html#a61fb848facad768550bbda2d03cd26de',1,'LIDL']]],
['sfx_5ftype',['SFX_TYPE',['../namespace_l_i_d_l.html#aa96fd8d8599ffe548724f04d8ebc418f',1,'LIDL']]],
['show_5fsettings',['SHOW_SETTINGS',['../namespace_l_i_d_l.html#a20a84de5fc493c7ae2fd83dc6d10f397',1,'LIDL']]]
]; | [ |
pending-order.js | var EventEmitter = require('events').EventEmitter;
var util = require('util'); | var env = require('./env');
// API
module.exports.createPendingOrderStore = function (options, logger) {
return new PendingOrderStore(options, logger);
};
var DEFAULT_STORE_ENGINE = ['redis', 'leveldb'];
var PendingOrderStore = function (options, logger) {
if (!options || !options.type ) {
throw new Error("Missing Pending order");
}
var self = this;
self.logger = logger;
self.client = null;
self.ttl = options.ttl || 3600;
self.status = {
status: 'initing'
};
self.loadKvPlugin(options, logger);
EventEmitter.call(this);
self.on('store-fail', function (msg) {
self.status.status = 'fail';
self.status.msg = msg;
});
self.on('store-ready', function (msg) {
self.status.status = 'ok';
if (self.status.msg) {
delete self.status.msg;
}
});
FuncUnits.register('PendingOrderStore', this);
};
util.inherits(PendingOrderStore, EventEmitter);
/**
* pending order operation callback
* @callback PendingOrderStore~opCallback
* @param {verror~WError} err - operation error
* @param {Object} res - null or the object fetched from storage
*/
PendingOrderStore.prototype.close = function (callback) {
this.client.close(callback);
}
/**
* store pending order
 * @name addPendingOrder
* @function
* @param {string} orderId - key
* @param {Object} orderInfo - the object to store
* @param {PendingOrderStore~opCallback} callback
*/
PendingOrderStore.prototype.addPendingOrder =
function (orderId, orderInfo, callback) {
var self = this;
if (!self.client) {
throw new WError("The pending order store is not inited");
}
self.logger.trace(
{orderId: orderId, order: orderInfo}, 'add order');
var realCallback = null;
if (callback) {
realCallback = function (err) {
if (err) {
return callback(new WError(err, "store pending order failed"));
}
callback(null, null);
};
}
self.client.set(orderId, JSON.stringify(orderInfo), self.ttl,
realCallback);
};
/**
* fetch pending order
 * @name getPendingOrder
* @function
* @param {string} orderId - key
* @param {PendingOrderStore~opCallback} callback
*/
PendingOrderStore.prototype.getPendingOrder =
function(orderId, callback) {
var self = this;
if (!self.client) {
throw new WError("The pending order store is not inited");
}
self.client.get(orderId,
function (err, res) {
self.logger.trace(
{orderId: orderId, order: res, err: err}, 'get order');
if (err) {
self.logger.debug({err: err}, "get pending order failed");
return callback(new WError(err, "get pending order failed"));
}
if (!res) {
callback({code: 0});
} else {
callback(null, JSON.parse(res));
}
});
};
/**
* delete pending order
 * @name deletePendingOrder
* @function
* @param {string} orderId - key
* @param {PendingOrderStore~opCallback} callback
*/
PendingOrderStore.prototype.deletePendingOrder =
function(orderId, callback) {
var self = this;
if (!self.client) {
throw new WError("The pending order store is not inited");
}
var realCallback = null;
if (callback) {
realCallback = function (err) {
if (err) {
return callback(new WError(err, "delete pending order failed"));
}
callback(null, null);
};
}
self.logger.trace(
{orderId: orderId}, 'delete order');
try {
self.client.del(orderId, realCallback);
} catch (e) {
        self.logger.error({err: e}, 'Fail to delete order ' + orderId);
}
};
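/**
 * Illustrative usage sketch (hypothetical: the 'redis' store type and the
 * logger instance are assumptions for illustration, not part of this module):
 *
 *   var store = require('./pending-order')
 *       .createPendingOrderStore({type: 'redis', ttl: 600}, logger);
 *   store.addPendingOrder('order-1', {channelType: 'test'}, function (err) {
 *       if (err) return logger.error({err: err}, 'store failed');
 *       store.getPendingOrder('order-1', function (err, order) {
 *           if (!err) store.deletePendingOrder('order-1');
 *       });
 *   });
 */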
PendingOrderStore.prototype.loadKvPlugin = function(option, logger) {
var name = option.type;
var defaultIndex = DEFAULT_STORE_ENGINE.indexOf(name);
var moduleName = name;
if (defaultIndex >= 0) {
moduleName = './kvstore/'+moduleName;
}
try {
var m = require(moduleName);
this.client = m.createClient(this, option, logger, env);
} catch (e) {
this.logger.error({err: e}, "Fail to load plugin");
    throw new Error('Failed to load store engine ' + moduleName + '\n' +
        'Please check the config file!');
}
}; | var WError = require('verror').WError;
var path = require('path');
var FuncUnits = require('./functionunits'); |
utils_test.py | import io
import resource
from pathlib import Path
import numpy as np
import PIL
import pytest
from keras_preprocessing.image import utils
def test_validate_filename(tmpdir):
valid_extensions = ('png', 'jpg')
filename = tmpdir.ensure('test.png')
assert utils.validate_filename(str(filename), valid_extensions)
filename = tmpdir.ensure('test.PnG')
assert utils.validate_filename(str(filename), valid_extensions)
filename = tmpdir.ensure('test.some_extension')
assert not utils.validate_filename(str(filename), valid_extensions)
assert not utils.validate_filename('some_test_file.png', valid_extensions)
def test_load_img(tmpdir):
filename_rgb = str(tmpdir / 'rgb_utils.png')
filename_rgba = str(tmpdir / 'rgba_utils.png')
filename_grayscale_8bit = str(tmpdir / 'grayscale_8bit_utils.png')
filename_grayscale_16bit = str(tmpdir / 'grayscale_16bit_utils.tiff')
filename_grayscale_32bit = str(tmpdir / 'grayscale_32bit_utils.tiff')
original_rgb_array = np.array(255 * np.random.rand(100, 100, 3),
dtype=np.uint8)
original_rgb = utils.array_to_img(original_rgb_array, scale=False)
original_rgb.save(filename_rgb)
original_rgba_array = np.array(255 * np.random.rand(100, 100, 4),
dtype=np.uint8)
original_rgba = utils.array_to_img(original_rgba_array, scale=False)
original_rgba.save(filename_rgba)
original_grayscale_8bit_array = np.array(255 * np.random.rand(100, 100, 1),
dtype=np.uint8)
original_grayscale_8bit = utils.array_to_img(original_grayscale_8bit_array,
scale=False)
original_grayscale_8bit.save(filename_grayscale_8bit)
    original_grayscale_16bit_array = np.array(
        np.random.randint(-32768, 32767, (100, 100, 1)), dtype=np.int16
    )
original_grayscale_16bit = utils.array_to_img(original_grayscale_16bit_array,
scale=False, dtype='int16')
original_grayscale_16bit.save(filename_grayscale_16bit)
original_grayscale_32bit_array = np.array(
np.random.randint(-2147483648, 2147483647, (100, 100, 1)), dtype=np.int32
)
original_grayscale_32bit = utils.array_to_img(original_grayscale_32bit_array,
scale=False, dtype='int32')
original_grayscale_32bit.save(filename_grayscale_32bit)
# Test that loaded image is exactly equal to original.
loaded_im = utils.load_img(filename_rgb)
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_rgb_array.shape
assert np.all(loaded_im_array == original_rgb_array)
loaded_im = utils.load_img(filename_rgba, color_mode='rgba')
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_rgba_array.shape
assert np.all(loaded_im_array == original_rgba_array)
loaded_im = utils.load_img(filename_rgb, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (original_rgb_array.shape[0],
original_rgb_array.shape[1], 1)
loaded_im = utils.load_img(filename_grayscale_8bit, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_grayscale_8bit_array.shape
assert np.all(loaded_im_array == original_grayscale_8bit_array)
loaded_im = utils.load_img(filename_grayscale_16bit, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype='int16')
assert loaded_im_array.shape == original_grayscale_16bit_array.shape
assert np.all(loaded_im_array == original_grayscale_16bit_array)
# test casting int16 image to float32
loaded_im_array = utils.img_to_array(loaded_im)
assert np.allclose(loaded_im_array, original_grayscale_16bit_array)
loaded_im = utils.load_img(filename_grayscale_32bit, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype='int32')
assert loaded_im_array.shape == original_grayscale_32bit_array.shape
assert np.all(loaded_im_array == original_grayscale_32bit_array)
# test casting int32 image to float32
loaded_im_array = utils.img_to_array(loaded_im)
assert np.allclose(loaded_im_array, original_grayscale_32bit_array)
# Test that nothing is changed when target size is equal to original.
loaded_im = utils.load_img(filename_rgb, target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_rgb_array.shape
assert np.all(loaded_im_array == original_rgb_array)
loaded_im = utils.load_img(filename_rgba, color_mode='rgba',
target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_rgba_array.shape
assert np.all(loaded_im_array == original_rgba_array)
loaded_im = utils.load_img(filename_rgb, color_mode='grayscale',
target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (original_rgba_array.shape[0],
original_rgba_array.shape[1], 1)
loaded_im = utils.load_img(filename_grayscale_8bit, color_mode='grayscale',
target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == original_grayscale_8bit_array.shape
assert np.all(loaded_im_array == original_grayscale_8bit_array)
loaded_im = utils.load_img(filename_grayscale_16bit, color_mode='grayscale',
target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im, dtype='int16')
assert loaded_im_array.shape == original_grayscale_16bit_array.shape
assert np.all(loaded_im_array == original_grayscale_16bit_array)
loaded_im = utils.load_img(filename_grayscale_32bit, color_mode='grayscale',
target_size=(100, 100))
loaded_im_array = utils.img_to_array(loaded_im, dtype='int32')
assert loaded_im_array.shape == original_grayscale_32bit_array.shape
assert np.all(loaded_im_array == original_grayscale_32bit_array)
# Test down-sampling with bilinear interpolation.
loaded_im = utils.load_img(filename_rgb, target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 3)
loaded_im = utils.load_img(filename_rgba, color_mode='rgba',
target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 4)
loaded_im = utils.load_img(filename_rgb, color_mode='grayscale',
target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 1)
loaded_im = utils.load_img(filename_grayscale_8bit, color_mode='grayscale',
target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 1)
loaded_im = utils.load_img(filename_grayscale_16bit, color_mode='grayscale',
target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im, dtype='int16')
assert loaded_im_array.shape == (25, 25, 1)
loaded_im = utils.load_img(filename_grayscale_32bit, color_mode='grayscale',
target_size=(25, 25))
loaded_im_array = utils.img_to_array(loaded_im, dtype='int32')
assert loaded_im_array.shape == (25, 25, 1)
# Test down-sampling with nearest neighbor interpolation.
loaded_im_nearest = utils.load_img(filename_rgb, target_size=(25, 25),
interpolation="nearest")
loaded_im_array_nearest = utils.img_to_array(loaded_im_nearest)
assert loaded_im_array_nearest.shape == (25, 25, 3)
assert np.any(loaded_im_array_nearest != loaded_im_array)
loaded_im_nearest = utils.load_img(filename_rgba, color_mode='rgba',
target_size=(25, 25),
interpolation="nearest")
loaded_im_array_nearest = utils.img_to_array(loaded_im_nearest)
assert loaded_im_array_nearest.shape == (25, 25, 4)
assert np.any(loaded_im_array_nearest != loaded_im_array)
loaded_im = utils.load_img(filename_grayscale_8bit, color_mode='grayscale',
target_size=(25, 25), interpolation="nearest")
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 1)
loaded_im = utils.load_img(filename_grayscale_16bit, color_mode='grayscale',
target_size=(25, 25), interpolation="nearest")
loaded_im_array = utils.img_to_array(loaded_im, dtype='int16')
assert loaded_im_array.shape == (25, 25, 1)
loaded_im = utils.load_img(filename_grayscale_32bit, color_mode='grayscale',
target_size=(25, 25), interpolation="nearest")
loaded_im_array = utils.img_to_array(loaded_im, dtype='int32')
assert loaded_im_array.shape == (25, 25, 1)
# Test different path type | assert np.all(loaded_im_array == original_grayscale_32bit_array)
_path = filename_grayscale_32bit # str
loaded_im = utils.load_img(_path, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype=np.int32)
assert np.all(loaded_im_array == original_grayscale_32bit_array)
_path = filename_grayscale_32bit.encode() # bytes
loaded_im = utils.load_img(_path, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype=np.int32)
assert np.all(loaded_im_array == original_grayscale_32bit_array)
_path = Path(tmpdir / 'grayscale_32bit_utils.tiff') # Path
loaded_im = utils.load_img(_path, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype=np.int32)
assert np.all(loaded_im_array == original_grayscale_32bit_array)
# Check that exception is raised if interpolation not supported.
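    # (No target_size is given here, so no resampling happens and the
    # unsupported interpolation must be silently ignored.)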
loaded_im = utils.load_img(filename_rgb, interpolation="unsupported")
with pytest.raises(ValueError):
loaded_im = utils.load_img(filename_rgb, target_size=(25, 25),
interpolation="unsupported")
# Check that the aspect ratio of a square is the same
filename_red_square = str(tmpdir / 'red_square_utils.png')
A = np.zeros((50, 100, 3), dtype=np.uint8) # rectangle image 100x50
A[20:30, 45:55, 0] = 255 # red square 10x10
red_square_array = np.array(A)
red_square = utils.array_to_img(red_square_array, scale=False)
red_square.save(filename_red_square)
loaded_im = utils.load_img(filename_red_square, target_size=(25, 25),
keep_aspect_ratio=True)
loaded_im_array = utils.img_to_array(loaded_im)
assert loaded_im_array.shape == (25, 25, 3)
    red_channel_arr = loaded_im_array[:, :, 0].astype(bool)
square_width = np.sum(np.sum(red_channel_arr, axis=0))
square_height = np.sum(np.sum(red_channel_arr, axis=1))
aspect_ratio_result = square_width / square_height
# original square had 1:1 ratio
assert aspect_ratio_result == pytest.approx(1.0)
def test_list_pictures(tmpdir):
filenames = ['test.png', 'test0.jpg', 'test-1.jpeg', '2test.bmp',
'2-test.ppm', '3.png', '1.jpeg', 'test.bmp', 'test0.ppm',
'test4.tiff', '5-test.tif', 'test.txt', 'foo.csv',
'face.gif', 'bar.txt']
subdirs = ['', 'subdir1', 'subdir2']
filenames = [tmpdir.ensure(subdir, f) for subdir in subdirs
for f in filenames]
found_images = utils.list_pictures(str(tmpdir))
assert len(found_images) == 33
found_images = utils.list_pictures(str(tmpdir), ext='png')
assert len(found_images) == 6
def test_array_to_img_and_img_to_array():
height, width = 10, 8
# Test the data format
# Test RGB 3D
x = np.random.random((3, height, width))
img = utils.array_to_img(x, data_format='channels_first')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_first')
assert x.shape == (3, height, width)
# Test RGBA 3D
x = np.random.random((4, height, width))
img = utils.array_to_img(x, data_format='channels_first')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_first')
assert x.shape == (4, height, width)
# Test 2D
x = np.random.random((1, height, width))
img = utils.array_to_img(x, data_format='channels_first')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_first')
assert x.shape == (1, height, width)
# grayscale 32-bit signed integer
x = np.array(
np.random.randint(-2147483648, 2147483647, (1, height, width)),
dtype=np.int32
)
img = utils.array_to_img(x, data_format='channels_first')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_first')
assert x.shape == (1, height, width)
# Test tf data format
# Test RGB 3D
x = np.random.random((height, width, 3))
img = utils.array_to_img(x, data_format='channels_last')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_last')
assert x.shape == (height, width, 3)
# Test RGBA 3D
x = np.random.random((height, width, 4))
img = utils.array_to_img(x, data_format='channels_last')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_last')
assert x.shape == (height, width, 4)
# Test 2D
x = np.random.random((height, width, 1))
img = utils.array_to_img(x, data_format='channels_last')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_last')
assert x.shape == (height, width, 1)
# grayscale 16-bit signed integer
    x = np.array(
        np.random.randint(-32768, 32767, (height, width, 1)),
        dtype=np.int16
    )
img = utils.array_to_img(x, data_format='channels_last')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_last')
assert x.shape == (height, width, 1)
# grayscale 32-bit signed integer
x = np.array(
np.random.randint(-2147483648, 2147483647, (height, width, 1)),
dtype=np.int32
)
img = utils.array_to_img(x, data_format='channels_last')
assert img.size == (width, height)
x = utils.img_to_array(img, data_format='channels_last')
assert x.shape == (height, width, 1)
# Test invalid use case
with pytest.raises(ValueError):
x = np.random.random((height, width)) # not 3D
img = utils.array_to_img(x, data_format='channels_first')
with pytest.raises(ValueError):
x = np.random.random((height, width, 3))
# unknown data_format
img = utils.array_to_img(x, data_format='channels')
with pytest.raises(ValueError):
# neither RGB, RGBA, or gray-scale
x = np.random.random((height, width, 5))
img = utils.array_to_img(x, data_format='channels_last')
with pytest.raises(ValueError):
x = np.random.random((height, width, 3))
# unknown data_format
img = utils.img_to_array(x, data_format='channels')
with pytest.raises(ValueError):
# neither RGB, RGBA, or gray-scale
x = np.random.random((height, width, 5, 3))
img = utils.img_to_array(x, data_format='channels_last')
def write_sample_image(tmpdir):
im = utils.array_to_img(np.random.rand(1, 1, 3))
path = str(tmpdir / 'sample_image.png')
utils.save_img(path, im)
return path
def test_image_file_handlers_close(tmpdir):
path = write_sample_image(tmpdir)
max_open_files, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
for i in range(max_open_files+1):
utils.load_img(path)
def test_load_img_returns_image(tmpdir):
path = write_sample_image(tmpdir)
im = utils.load_img(path)
assert isinstance(im, PIL.Image.Image)
if __name__ == '__main__':
pytest.main([__file__]) | with open(filename_grayscale_32bit, 'rb') as f:
_path = io.BytesIO(f.read()) # io.Bytesio
loaded_im = utils.load_img(_path, color_mode='grayscale')
loaded_im_array = utils.img_to_array(loaded_im, dtype=np.int32) |
cli.py | import click
from pathlib import Path
from . import PythonTouch
@click.command()
@click.argument("directory")
def | (directory):
x = PythonTouch()
x.touch(directory)
| main |
icon.go | // SPDX-License-Identifier: Unlicense OR MIT
package decredmaterial
import (
_ "image/png" //makes png images a decodable format
"gioui.org/widget"
)
type Icon struct {
*widget.Icon
}
// NewIcon returns a new Icon from IconVG data.
func | (data []byte) (*Icon, error) {
icon, err := widget.NewIcon(data)
if err != nil {
return nil, err
}
return &Icon{icon}, nil
}
| NewIcon |
operations.py | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import GeometryField, aggregates
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
Adapter = WKTAdapter
@cached_property
def geom_func_prefix(self):
return '' if self.is_mysql_5_5 else 'ST_'
@cached_property
def is_mysql_5_5(self):
return self.connection.mysql_version < (5, 6, 1)
@cached_property
def is_mysql_5_6(self):
return self.connection.mysql_version < (5, 7, 6)
@cached_property
def uses_invalid_empty_geometry_collection(self):
return self.connection.mysql_version >= (5, 7, 5)
@cached_property
def select(self):
return self.geom_func_prefix + 'AsText(%s)'
@cached_property
def from_wkb(self):
return self.geom_func_prefix + 'GeomFromWKB'
@cached_property
def from_text(self):
return self.geom_func_prefix + 'GeomFromText'
@cached_property
def gis_operators(self):
MBREquals = 'MBREqual' if self.is_mysql_5_6 else 'MBREquals'
return {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # ...
'contained': SpatialOperator(func='MBRWithin'), # ...
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func=MBREquals),
'exact': SpatialOperator(func=MBREquals),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func=MBREquals),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
@cached_property
def function_names(self):
return {'Length': 'GLength'} if self.is_mysql_5_5 else {}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
|
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
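    # Illustrative example: for a plain geometry value the placeholder is
    # 'GeomFromText(%s)' on MySQL 5.5 and 'ST_GeomFromText(%s)' on newer
    # versions, via the geom_func_prefix defined above.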
def get_db_converters(self, expression):
converters = super(MySQLOperations, self).get_db_converters(expression)
if isinstance(expression.output_field, GeometryField) and self.uses_invalid_empty_geometry_collection:
converters.append(self.convert_invalid_empty_geometry_collection)
return converters
# https://dev.mysql.com/doc/refman/en/spatial-function-argument-handling.html
# MySQL 5.7.5 adds support for the empty geometry collections, but they are represented with invalid WKT.
def convert_invalid_empty_geometry_collection(self, value, expression, connection, context):
if value == b'GEOMETRYCOLLECTION()':
return b'GEOMETRYCOLLECTION EMPTY'
return value
| unsupported = {
'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle',
'ForceRHR', 'GeoHash', 'IsValid', 'MakeValid', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'Transform', 'Translate',
}
if self.is_mysql_5_5:
unsupported.update({'Difference', 'Distance', 'Intersection', 'SymDifference', 'Union'})
return unsupported |
models.py | from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .in_states import STATE_CHOICES
class INStateField(CharField):
| """
A model field that forms represent as a ``forms.INStateField`` field and
stores the two-letter Indian state abbreviation in the database.
"""
description = _("Indian state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(INStateField, self).__init__(*args, **kwargs) |
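
# Illustrative usage (hypothetical model, not part of this module):
#
#     class Address(models.Model):
#         state = INStateField()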
|
create_storage_types.go | /*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
*/
package pipeline
import (
"context"
"github.com/pkg/errors"
"github.com/Azure/azure-service-operator/v2/tools/generator/internal/astmodel"
"github.com/Azure/azure-service-operator/v2/tools/generator/internal/codegen/storage"
)
const CreateStorageTypesStageID = "createStorageTypes"
// CreateStorageTypes returns a pipeline stage that creates dedicated storage types for each resource and nested object.
// Storage versions are created for *all* API versions to allow users of older versions of the operator to easily
// upgrade. This is of course a bit odd for the first release, but defining the approach from day one is useful.
func CreateStorageTypes() *Stage | {
stage := NewStage(
CreateStorageTypesStageID,
"Create storage versions of CRD types",
func(ctx context.Context, state *State) (*State, error) {
// Predicate to isolate both resources and complex objects
isResourceOrObject := func(def astmodel.TypeDefinition) bool {
_, isResource := astmodel.AsResourceType(def.Type())
_, isObject := astmodel.AsObjectType(def.Type())
return isResource || isObject
}
// Predicate to filter out ARM types
isNotARMType := func(def astmodel.TypeDefinition) bool {
return !astmodel.ARMFlag.IsOn(def.Type())
}
// Filter to the types we want to process
typesToConvert := state.Definitions().Where(isResourceOrObject).Where(isNotARMType)
storageDefs := make(astmodel.TypeDefinitionSet)
typeConverter := storage.NewTypeConverter(state.Definitions())
// Create storage variants
for name, def := range typesToConvert {
storageDef, err := typeConverter.ConvertDefinition(def)
if err != nil {
return nil, errors.Wrapf(err, "creating storage variant of %q", name)
}
storageDefs.Add(storageDef)
}
defs := state.Definitions().Copy()
defs.AddTypes(storageDefs)
return state.WithDefinitions(defs), nil
})
return stage
} |
|
test_create.py | from tests.system.action.base import BaseActionTestCase
class MotionSubmitterCreateActionTest(BaseActionTestCase):
def test_create(self) -> None:
self.create_model("meeting/111", {"name": "name_m123etrd"})
self.create_model("motion/357", {"title": "title_YIDYXmKj", "meeting_id": 111})
self.create_model(
"user/78", {"username": "username_loetzbfg", "meeting_id": 111}
)
response = self.client.post(
"/",
json=[
{
"action": "motion_submitter.create",
"data": [{"motion_id": 357, "user_id": 78}],
}
],
)
self.assert_status_code(response, 200)
model = self.get_model("motion_submitter/1")
assert model.get("motion_id") == 357
assert model.get("user_id") == 78
assert model.get("weight") == 10000
def test_create_not_unique(self) -> None:
self.create_model("meeting/111", {"name": "name_m123etrd"})
self.create_model("motion/357", {"title": "title_YIDYXmKj", "meeting_id": 111})
self.create_model(
"user/78", {"username": "username_loetzbfg", "meeting_id": 111}
)
self.create_model(
"motion_submitter/12", {"motion_id": 357, "user_id": 78, "meeting_id": 111}
)
response = self.client.post(
"/",
json=[
{
"action": "motion_submitter.create",
"data": [{"motion_id": 357, "user_id": 78}],
}
],
)
self.assert_status_code(response, 400)
assert "(user_id, motion_id) must be unique." in str(response.data)
def test_create_empty_data(self) -> None:
response = self.client.post(
"/",
json=[{"action": "motion_submitter.create", "data": [{}]}],
)
self.assert_status_code(response, 400)
self.assertIn(
"data must contain [\\'motion_id\\', \\'user_id\\'] properties",
str(response.data),
)
def test_create_wrong_field(self) -> None:
self.create_model("meeting/111", {"name": "name_m123etrd"})
self.create_model("motion/357", {"title": "title_YIDYXmKj", "meeting_id": 111})
self.create_model(
"user/78", {"username": "username_lskeuebe", "meeting_id": 111}
)
response = self.client.post(
"/",
json=[
{
"action": "motion_submitter.create",
"data": [
{
"motion_id": 357,
"user_id": 78,
"wrong_field": "text_AefohteiF8",
}
],
}
],
)
self.assert_status_code(response, 400)
self.assertIn(
"data must not contain {\\'wrong_field\\'} properties",
str(response.data),
)
def test_create_not_matching_meeting_ids(self) -> None:
self.create_model("meeting/111", {"name": "name_m123etrd"})
self.create_model("meeting/112", {"name": "name_ewadetrd"})
self.create_model("motion/357", {"title": "title_YIDYXmKj", "meeting_id": 111})
self.create_model(
"user/78", {"username": "username_loetzbfg", "meeting_id": 112}
)
response = self.client.post(
"/",
json=[
{
"action": "motion_submitter.create",
"data": [{"motion_id": 357, "user_id": 78}],
}
],
)
self.assert_status_code(response, 400) | ) | self.assertIn(
"Cannot create motion_submitter, meeting id of motion and (temporary) user don\\'t match.",
str(response.data), |
cluster_role.rs | // Generated from definition io.k8s.api.rbac.v1.ClusterRole
/// ClusterRole is a cluster level, logical grouping of PolicyRules that can be referenced as a unit by a RoleBinding or ClusterRoleBinding.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct ClusterRole {
/// AggregationRule is an optional field that describes how to build the Rules for this ClusterRole. If AggregationRule is set, then the Rules are controller managed and direct changes to Rules will be stomped by the controller.
pub aggregation_rule: Option<crate::v1_16::api::rbac::v1::AggregationRule>,
/// Standard object's metadata.
pub metadata: Option<crate::v1_16::apimachinery::pkg::apis::meta::v1::ObjectMeta>,
/// Rules holds all the PolicyRules for this ClusterRole
pub rules: Option<Vec<crate::v1_16::api::rbac::v1::PolicyRule>>,
}
// Begin rbac.authorization.k8s.io/v1/ClusterRole
// Generated from operation createRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// create a ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`CreateClusterRoleResponse`]`>` constructor, or [`CreateClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_cluster_role(
body: &crate::v1_16::api::rbac::v1::ClusterRole,
optional: CreateClusterRoleOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<CreateClusterRoleResponse>), crate::RequestError> {
let CreateClusterRoleOptional {
dry_run,
field_manager,
pretty,
} = optional;
let __url = "/apis/rbac.authorization.k8s.io/v1/clusterroles?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(dry_run) = dry_run {
__query_pairs.append_pair("dryRun", dry_run);
}
if let Some(field_manager) = field_manager {
__query_pairs.append_pair("fieldManager", field_manager);
}
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::post(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`ClusterRole::create_cluster_role`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct CreateClusterRoleOptional<'a> {
/// When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
pub dry_run: Option<&'a str>,
    /// fieldManager is a name associated with the actor or entity that is making these changes. The value must be 128 characters or less, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
pub field_manager: Option<&'a str>,
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
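// Illustrative call sketch (hypothetical transport: this module only builds an
// `http::Request` plus a response parser, and assumes you send the request
// with an HTTP client of your choice):
//
//     let (request, response_body) = ClusterRole::create_cluster_role(
//         &role, CreateClusterRoleOptional::default())?;
//     // After sending `request`, feed the status code and body bytes into
//     // `response_body(status_code)` until it yields a CreateClusterRoleResponse.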
/// Use `<CreateClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::create_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum CreateClusterRoleResponse {
Ok(crate::v1_16::api::rbac::v1::ClusterRole),
Created(crate::v1_16::api::rbac::v1::ClusterRole),
Accepted(crate::v1_16::api::rbac::v1::ClusterRole),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for CreateClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((CreateClusterRoleResponse::Ok(result), buf.len()))
},
http::StatusCode::CREATED => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((CreateClusterRoleResponse::Created(result), buf.len()))
},
http::StatusCode::ACCEPTED => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((CreateClusterRoleResponse::Accepted(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((CreateClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation deleteRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// delete a ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`DeleteClusterRoleResponse`]`>` constructor, or [`DeleteClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ClusterRole
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_cluster_role(
name: &str,
optional: crate::v1_16::DeleteOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<DeleteClusterRoleResponse>), crate::RequestError> {
let __url = format!("/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<DeleteClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::delete_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum DeleteClusterRoleResponse {
OkStatus(crate::v1_16::apimachinery::pkg::apis::meta::v1::Status),
OkValue(crate::v1_16::api::rbac::v1::ClusterRole),
Accepted(crate::v1_16::apimachinery::pkg::apis::meta::v1::Status),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for DeleteClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result: serde_json::Map<String, serde_json::Value> = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
let is_status = match result.get("kind") {
Some(serde_json::Value::String(s)) if s == "Status" => true,
_ => false,
};
if is_status {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteClusterRoleResponse::OkStatus(result), buf.len()))
}
else {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteClusterRoleResponse::OkValue(result), buf.len()))
}
},
http::StatusCode::ACCEPTED => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((DeleteClusterRoleResponse::Accepted(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((DeleteClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation deleteRbacAuthorizationV1CollectionClusterRole
impl ClusterRole {
/// delete collection of ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`DeleteCollectionClusterRoleResponse`]`>` constructor, or [`DeleteCollectionClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_cluster_role(
delete_optional: crate::v1_16::DeleteOptional<'_>,
list_optional: crate::v1_16::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<DeleteCollectionClusterRoleResponse>), crate::RequestError> {
let __url = "/apis/rbac.authorization.k8s.io/v1/clusterroles?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<DeleteCollectionClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::delete_collection_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum DeleteCollectionClusterRoleResponse {
OkStatus(crate::v1_16::apimachinery::pkg::apis::meta::v1::Status),
OkValue(crate::v1_16::api::rbac::v1::ClusterRoleList),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for DeleteCollectionClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result: serde_json::Map<String, serde_json::Value> = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
let is_status = match result.get("kind") {
Some(serde_json::Value::String(s)) if s == "Status" => true,
_ => false,
};
if is_status {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteCollectionClusterRoleResponse::OkStatus(result), buf.len()))
}
else {
let result = serde::Deserialize::deserialize(serde_json::Value::Object(result));
let result = result.map_err(crate::ResponseError::Json)?;
Ok((DeleteCollectionClusterRoleResponse::OkValue(result), buf.len()))
}
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((DeleteCollectionClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation listRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// list or watch objects of kind ClusterRole
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`ListClusterRoleResponse`]`>` constructor, or [`ListClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_cluster_role(
optional: crate::v1_16::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ListClusterRoleResponse>), crate::RequestError> {
let __url = "/apis/rbac.authorization.k8s.io/v1/clusterroles?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<ListClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::list_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ListClusterRoleResponse {
Ok(crate::v1_16::api::rbac::v1::ClusterRoleList),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ListClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ListClusterRoleResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ListClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation patchRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// partially update the specified ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`PatchClusterRoleResponse`]`>` constructor, or [`PatchClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ClusterRole
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_cluster_role(
name: &str,
body: &crate::v1_16::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::v1_16::PatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<PatchClusterRoleResponse>), crate::RequestError> {
let __url = format!("/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::patch(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static(match body {
crate::v1_16::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::v1_16::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::v1_16::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<PatchClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::patch_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum PatchClusterRoleResponse {
Ok(crate::v1_16::api::rbac::v1::ClusterRole),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for PatchClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((PatchClusterRoleResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((PatchClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation readRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// read the specified ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadClusterRoleResponse`]`>` constructor, or [`ReadClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ClusterRole
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_cluster_role(
name: &str,
optional: ReadClusterRoleOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReadClusterRoleResponse>), crate::RequestError> {
let ReadClusterRoleOptional {
pretty,
} = optional;
let __url = format!("/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`ClusterRole::read_cluster_role`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadClusterRoleOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::read_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadClusterRoleResponse {
Ok(crate::v1_16::api::rbac::v1::ClusterRole),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadClusterRoleResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation replaceRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// replace the specified ClusterRole
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReplaceClusterRoleResponse`]`>` constructor, or [`ReplaceClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the ClusterRole
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_cluster_role(
name: &str,
body: &crate::v1_16::api::rbac::v1::ClusterRole,
optional: ReplaceClusterRoleOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReplaceClusterRoleResponse>), crate::RequestError> {
let ReplaceClusterRoleOptional {
dry_run,
field_manager,
pretty,
} = optional;
let __url = format!("/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(dry_run) = dry_run {
__query_pairs.append_pair("dryRun", dry_run);
}
if let Some(field_manager) = field_manager {
__query_pairs.append_pair("fieldManager", field_manager);
}
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let mut __request = http::Request::put(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
__request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`ClusterRole::replace_cluster_role`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReplaceClusterRoleOptional<'a> {
/// When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
pub dry_run: Option<&'a str>,
    /// fieldManager is a name associated with the actor or entity that is making these changes. The value must be 128 characters or less, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
pub field_manager: Option<&'a str>,
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReplaceClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::replace_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReplaceClusterRoleResponse {
Ok(crate::v1_16::api::rbac::v1::ClusterRole),
Created(crate::v1_16::api::rbac::v1::ClusterRole),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReplaceClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReplaceClusterRoleResponse::Ok(result), buf.len()))
},
http::StatusCode::CREATED => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReplaceClusterRoleResponse::Created(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReplaceClusterRoleResponse::Other(result), read))
},
}
}
}
// Generated from operation watchRbacAuthorizationV1ClusterRole
impl ClusterRole {
/// list or watch objects of kind ClusterRole
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`WatchClusterRoleResponse`]`>` constructor, or [`WatchClusterRoleResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_cluster_role(
optional: crate::v1_16::WatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<WatchClusterRoleResponse>), crate::RequestError> {
let __url = "/apis/rbac.authorization.k8s.io/v1/clusterroles?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let mut __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Use `<WatchClusterRoleResponse as Response>::try_from_parts` to parse the HTTP response body of [`ClusterRole::watch_cluster_role`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum WatchClusterRoleResponse {
Ok(crate::v1_16::apimachinery::pkg::apis::meta::v1::WatchEvent<ClusterRole>),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for WatchClusterRoleResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let mut deserializer = serde_json::Deserializer::from_slice(buf).into_iter();
let (result, byte_offset) = match deserializer.next() {
Some(Ok(value)) => (value, deserializer.byte_offset()),
Some(Err(ref err)) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Some(Err(err)) => return Err(crate::ResponseError::Json(err)),
None => return Err(crate::ResponseError::NeedMoreData),
};
Ok((WatchClusterRoleResponse::Ok(result), byte_offset))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((WatchClusterRoleResponse::Other(result), read))
},
}
}
}
// End rbac.authorization.k8s.io/v1/ClusterRole
impl crate::Resource for ClusterRole {
fn api_version() -> &'static str {
"rbac.authorization.k8s.io/v1"
}
fn group() -> &'static str {
"rbac.authorization.k8s.io"
}
fn kind() -> &'static str {
"ClusterRole"
}
fn version() -> &'static str {
"v1"
}
}
impl crate::Metadata for ClusterRole {
type Ty = crate::v1_16::apimachinery::pkg::apis::meta::v1::ObjectMeta;
fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
self.metadata.as_ref()
}
}
impl<'de> serde::Deserialize<'de> for ClusterRole { | Key_api_version,
Key_kind,
Key_aggregation_rule,
Key_metadata,
Key_rules,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"aggregationRule" => Field::Key_aggregation_rule,
"metadata" => Field::Key_metadata,
"rules" => Field::Key_rules,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = ClusterRole;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct ClusterRole")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_aggregation_rule: Option<crate::v1_16::api::rbac::v1::AggregationRule> = None;
let mut value_metadata: Option<crate::v1_16::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_rules: Option<Vec<crate::v1_16::api::rbac::v1::PolicyRule>> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::api_version() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::api_version()));
}
},
Field::Key_kind => {
let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::kind() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::kind()));
}
},
Field::Key_aggregation_rule => value_aggregation_rule = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_rules => value_rules = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(ClusterRole {
aggregation_rule: value_aggregation_rule,
metadata: value_metadata,
rules: value_rules,
})
}
}
deserializer.deserialize_struct(
"ClusterRole",
&[
"apiVersion",
"kind",
"aggregationRule",
"metadata",
"rules",
],
Visitor,
)
}
}
impl serde::Serialize for ClusterRole {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"ClusterRole",
2 +
self.aggregation_rule.as_ref().map_or(0, |_| 1) +
self.metadata.as_ref().map_or(0, |_| 1) +
self.rules.as_ref().map_or(0, |_| 1),
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::api_version())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::kind())?;
if let Some(value) = &self.aggregation_rule {
serde::ser::SerializeStruct::serialize_field(&mut state, "aggregationRule", value)?;
}
if let Some(value) = &self.metadata {
serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
if let Some(value) = &self.rules {
serde::ser::SerializeStruct::serialize_field(&mut state, "rules", value)?;
}
serde::ser::SerializeStruct::end(state)
}
} | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field { |
client.rs | #[cfg(test)]
mod tests {
use ursa_key_utils::*;
#[test]
fn test_default_keys_generation() {
let (public_key, private_key) = generate_keypair().expect("Failed to generate key pair.");
dbg!(&Vec::from(&public_key));
dbg!(&Vec::from(&private_key));
assert!(!&Vec::from(&public_key).is_empty());
assert!(!&Vec::from(&private_key).is_empty());
}
#[test]
fn test_default_keys_generation_with_seed() {
let (public_key, private_key) =
generate_keypair_with_seed(vec![1, 2, 3]).expect("Failed to generate key pair.");
dbg!(&Vec::from(&public_key));
dbg!(&Vec::from(&private_key));
assert!(!&Vec::from(&public_key).is_empty());
assert!(!&Vec::from(&private_key).is_empty());
}
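    // Note: with a fixed seed, key generation is presumably deterministic,
    // though the assertions here only check that non-empty keys come back.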
#[test]
fn | () {
let (public_key, private_key) =
generate_keypair_with_secret_key(vec![128; 64]).expect("Failed to generate key pair.");
dbg!(&Vec::from(&public_key));
dbg!(&Vec::from(&private_key));
assert!(!&Vec::from(&public_key).is_empty());
assert!(!&Vec::from(&private_key).is_empty());
}
}
| test_default_keys_generation_with_secret_key |
yaml_test.go | package yaml_test
import (
"io/ioutil"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/config/parser/yaml"
)
func | (t *testing.T) {
tests := []struct {
name string
inputFile string
want interface{}
wantErr string
}{
{
name: "happy path",
inputFile: "testdata/deployment.yaml",
want: map[string]interface{}{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": map[string]interface{}{
"name": "hello-kubernetes",
},
"spec": map[string]interface{}{
"replicas": float64(4),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
b, err := ioutil.ReadFile(tt.inputFile)
require.NoError(t, err)
p := yaml.Parser{}
got, err := p.Parse(b)
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func TestParser_SeparateSubDocuments(t *testing.T) {
tests := []struct {
name string
data []byte
want [][]byte
}{
{
name: "happy path",
data: []byte(`kind: Pod
---
kind: Service`),
want: [][]byte{
[]byte(`kind: Pod`),
[]byte(`kind: Service`),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
p := &yaml.Parser{}
got := p.SeparateSubDocuments(tt.data)
assert.Equal(t, tt.want, got)
})
}
}
| TestParser_Parse |
phillips_ji.js | {"frequencies":[261.6255653006,275.93321340298,286.15296204753,294.32876096318,305.5744765615,306.59245933664,327.03195662575,331.11985608357,349.22797321314,367.91095120397,386.30649876417,392.4383479509,407.76797091773,408.78994578219,429.2294430713,441.49314144476,457.84473927605,459.88868900496,490.54793493862,496.67978412536,515.07533168556,523.2511306012],"description":"Pauline Phillips, JI 0 #/b \"C\" scale (2002), TL 8-10-2002"} |
||
agent_maniplate.go | // Copyright 2018 MOBIKE, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package checker
import (
"bytes"
"database/sql"
"fmt"
"io"
"net/http"
"net/url"
"os/exec"
"path"
"strings"
"time"
"github.com/juju/errors"
"github.com/moiot/moha/pkg/log"
)
func (s *Server) runChangeMaster() error {
// get leader
	leaderID, _, err := s.etcdClient.Get(s.ctx, leaderPath)
	if err != nil {
		return errors.Trace(err)
	}
	parsedURL, err := url.Parse("http://" + string(leaderID))
	if err != nil {
		return errors.Trace(err)
	}
// hack logic
u := parsedURL.Hostname()
port := "1" + parsedURL.Port()
_, err = http.Get(fmt.Sprintf("http://%s:%s/setReadOnly", u, port))
time.Sleep(3 * time.Second)
_, err = http.Get(fmt.Sprintf("http://%s:%s/changeMaster", u, port))
return err
}
func (s *Server) deleteTerm(agentID string) error {
return s.etcdClient.Delete(s.ctx, path.Join("election", "terms", agentID), false)
}
func (s *Server) getASlave() string {
masterID, _ := s.getDBConnection()
// hack ways
var slaveID string
for id := range s.cfg.IDContainerMapping {
if id == masterID {
continue
}
slaveID = id
break
}
return slaveID
}
// RunStopSlave runs `stop slave` on the given DB
func RunStopSlave(rootConn *sql.DB) error {
_, err := rootConn.Exec("STOP SLAVE;")
return errors.Trace(err)
}
// RunStopAgent runs `docker stop <containerName>`
func RunStopAgent(containerName string) error {
log.Info("run docker stop ", containerName)
cmd := exec.Command("docker", "stop", containerName)
return cmd.Run()
}
// RunStartAgent runs `docker start <containerName>`
func RunStartAgent(containerName string) error {
log.Info("run docker start ", containerName)
cmd := exec.Command("docker", "start", containerName)
return cmd.Run()
}
// RunKill9Agent runs `docker exec <containerName> kill -9 <agentPID>`
func RunKill9Agent(containerName string) error {
cmd := exec.Command("docker", "exec", containerName,
"pgrep", "-f", "mysql-agent")
var out bytes.Buffer
var stderr bytes.Buffer
cmd.Stdout = &out
cmd.Stderr = &stderr
err := cmd.Run()
if err != nil {
		return errors.Errorf("err: %v, stdout: %s, stderr: %s",
			err, out.String(), stderr.String())
}
length := 0
var pid string
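	// Read pgrep's output line by line until the first non-empty line, which
	// is taken as the agent PID (assumes pgrep matched a running process).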
for length == 0 {
bs, e := out.ReadBytes('\n')
if length = len(bs); length == 0 {
continue
}
pid = string(bs)
if e != nil {
if e == io.EOF {
break
}
return errors.Trace(e)
}
}
pid = strings.Trim(pid, "\n")
log.Info("mysql agent pid in ", containerName, " is ", pid)
cmd = exec.Command("docker", "exec", containerName,
"kill", "-9", pid)
var stderrBuf bytes.Buffer
var stdoutBuf bytes.Buffer
cmd.Stderr = &stderrBuf
cmd.Stdout = &stdoutBuf
err = cmd.Run()
if err != nil {
		return errors.Errorf("err: %v, stdout: %s, stderr: %s",
			err, stdoutBuf.String(), stderrBuf.String())
}
return nil
}
// IPOf returns the ip of the container, given the container name/ID
func IPOf(node string) (string, error) {
cmd := exec.Command("docker", "inspect", "-f",
"'{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}'",
node)
var stderrBuf bytes.Buffer
var stdoutBuf bytes.Buffer
cmd.Stderr = &stderrBuf
cmd.Stdout = &stdoutBuf
err := cmd.Run()
if err != nil {
return "", errors.Errorf(fmt.Sprintf("err: %v ,stdout: %s, stderr: %s",
err, stdoutBuf.String(), stderrBuf.String()))
}
r := stdoutBuf.String()
r = strings.Replace(r, "\n", "", -1)
r = strings.Replace(r, "'", "", -1)
return r, nil
}
// CheckCall runs the command, returned with the result and error
func CheckCall(command string) (result string, err error) {
if command == "" {
log.Error("command is empty")
return "", errors.NotValidf("command is empty")
}
log.Info("going to run command: ", command)
split := strings.Split(command, " ")
splitCommand := make([]string, 0)
for _, c := range split {
if c == "" {
continue
}
splitCommand = append(splitCommand, c)
}
log.Info("going to run split command: ", splitCommand)
cmd := exec.Command(splitCommand[0], splitCommand[1:]...)
var stderrBuf bytes.Buffer
var stdoutBuf bytes.Buffer
cmd.Stderr = &stderrBuf
cmd.Stdout = &stdoutBuf
err = cmd.Run()
if err != nil {
return "", errors.Errorf(fmt.Sprintf("err: %v ,stdout: %s, stderr: %s",
err, stdoutBuf.String(), stderrBuf.String()))
}
result = stdoutBuf.String()
return result, err
}
// PartitionOutgoing partitions the outgoing network for the partitionedAZ,
// with the partitionType
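// (azSet, ipAZMapping and azNodeMapping are package-level topology maps
// populated elsewhere in this package.)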
func PartitionOutgoing(partitionTemplate, partitionedAZ, partitionType string) error {
if _, ok := azSet[partitionedAZ]; !ok {
log.Errorf("the az %s is not in azSet %+v", partitionedAZ, azSet)
return errors.Errorf("the az %s is not in azSet %+v", partitionedAZ, azSet)
}
query := partitionTemplate
for ip, az := range ipAZMapping {
if ip == "" || az == partitionedAZ {
continue
}
query += fmt.Sprintf("--target %s ", ip)
}
partitionedNodes := azNodeMapping[partitionedAZ]
for node := range partitionedNodes {
nodeQuery := fmt.Sprintf("%s %s %s",
query, partitionType, node)
_, err := CheckCall(nodeQuery)
if err != nil {
log.Errorf("has error in executing %s, %+v",
nodeQuery, err)
return err
}
}
return nil
}
// PartitionIncoming partitions the incoming network for the partitionedAZ,
// with the partitionType
func PartitionIncoming(partitionTemplate, partitionedAZ, partitionType string) error {
if _, ok := azSet[partitionedAZ]; !ok |
query := partitionTemplate
for ip := range azIPMapping[partitionedAZ] {
query += fmt.Sprintf("--target %s ", ip)
}
for az, nodes := range azNodeMapping {
if az == partitionedAZ {
continue
}
for node := range nodes {
nodeQuery := fmt.Sprintf("%s %s %s",
query, partitionType, node)
_, err := CheckCall(nodeQuery)
if err != nil {
log.Errorf("has error in executing %s, %+v",
nodeQuery, err)
return err
}
}
}
return nil
}
| {
log.Errorf("the az %s is not in azSet %+v", partitionedAZ, azSet)
return errors.Errorf("the az %s is not in azSet %+v", partitionedAZ, azSet)
} |
run.go | package main
import (
"context"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/containerd/containerd"
"github.com/containerd/containerd/errdefs"
"github.com/containerd/containerd/log"
"github.com/containerd/containerd/mount"
"github.com/opencontainers/image-spec/identity"
"github.com/pkg/errors"
)
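// run pulls the image, mounts a read-only snapshot view of its rootfs, and
// walks targetPaths (package-level, defaulting to "/") looking for files whose
// base name matches one of targetBins, returning their in-image paths.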
func | (ctx context.Context, client *containerd.Client, ref, snapshotter string, opts ...containerd.RemoteOpt) ([]string, error) {
ctx = log.WithLogger(ctx, log.G(ctx).WithField("ref", ref))
log.G(ctx).Debug("Pulling image")
opts = append(opts, containerd.WithPullUnpack)
img, err := client.Pull(ctx, ref, opts...)
if err != nil {
return nil, errors.Wrap(err, "error pulling image")
}
diffIDs, err := img.RootFS(ctx)
if err != nil {
return nil, errors.Wrap(err, "error getting rootfs layer entries for image")
}
chainID := identity.ChainID(diffIDs).String()
ctx = log.WithLogger(ctx, log.G(ctx).WithField("image.ChainID", chainID))
target, err := ioutil.TempDir("", chainID)
if err != nil {
return nil, errors.Wrap(err, "error creating mount target")
}
ctx = log.WithLogger(ctx, log.G(ctx).WithField("mount.Target", target))
defer os.RemoveAll(target)
log.G(ctx).Debug("Creating snapshot")
mounts, err := client.SnapshotService(snapshotter).View(ctx, target, chainID)
if err != nil {
if errdefs.IsAlreadyExists(err) {
mounts, err = client.SnapshotService(snapshotter).Mounts(ctx, target)
err = errors.Wrap(err, "error getting snapshot mounts")
}
if err != nil {
return nil, errors.Wrap(err, "error getting mounts")
}
}
defer client.SnapshotService(snapshotter).Remove(ctx, target) //nolint:errcheck
log.G(ctx).Debug("Mounting image rootfs")
if err := mount.All(mounts, target); err != nil {
return nil, errors.Wrap(err, "error mounting rootfs")
}
defer func() {
if err := mount.UnmountAll(target, 0); err != nil {
log.G(ctx).WithError(err).Error("error unmounting image")
}
}()
if len(targetPaths) == 0 {
targetPaths = append(targetPaths, "/")
}
var found []string
log.G(ctx).Debug("Starting image scan")
for _, scanPath := range targetPaths {
err := filepath.WalkDir(filepath.Join(target, scanPath), func(p string, info os.DirEntry, err error) error {
if err != nil {
if !os.IsNotExist(err) {
return err
}
return nil
}
if info.IsDir() {
return nil
}
for _, bin := range targetBins {
if filepath.Base(p) == bin {
found = append(found, strings.TrimPrefix(p, target))
}
}
return nil
})
if err != nil {
return nil, err
}
}
return found, nil
}
| run |
user-profile.component.ts | import { Component, OnInit, OnDestroy, ViewChild, ElementRef, ChangeDetectorRef } from '@angular/core'
import { FormGroup, FormControl, Validators, FormArray, FormBuilder, AbstractControl, ValidatorFn } from '@angular/forms'
import { ENTER, COMMA } from '@angular/cdk/keycodes'
import { Subscription, Observable } from 'rxjs'
import { startWith, map, debounceTime, distinctUntilChanged } from 'rxjs/operators'
import { MatSnackBar, MatChipInputEvent, DateAdapter, MAT_DATE_FORMATS, MatDialog } from '@angular/material'
import { AppDateAdapter, APP_DATE_FORMATS, changeformat } from '../../services/format-datepicker'
import { ImageCropComponent, ConfigurationsService } from '@sunbird-cb/utils'
import { IMAGE_MAX_SIZE, IMAGE_SUPPORT_TYPES } from '@ws/author/src/lib/constants/upload'
import { UserProfileService } from '../../services/user-profile.service'
import { Router, ActivatedRoute } from '@angular/router'
import {
INationality,
ILanguages,
IChipItems,
IGovtOrgMeta,
IIndustriesMeta,
IProfileAcademics,
INation,
IdegreesMeta,
IdesignationsMeta,
} from '../../models/user-profile.model'
import { NsUserProfileDetails } from '@ws/app/src/lib/routes/user-profile/models/NsUserProfile'
import { NotificationComponent } from '@ws/author/src/lib/modules/shared/components/notification/notification.component'
import { Notify } from '@ws/author/src/lib/constants/notificationMessage'
import { NOTIFICATION_TIME } from '@ws/author/src/lib/constants/constant'
import { LoaderService } from '@ws/author/src/public-api'
/* tslint:disable */
import _ from 'lodash'
/* tslint:enable */
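// Validator factory: flags a control with a `forbiddenNames` error unless its
// value exactly matches the `name` of one of the supplied options.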
export function | (optionsArray: any): ValidatorFn {
return (control: AbstractControl): { [key: string]: any } | null => {
if (!optionsArray) {
return null
// tslint:disable-next-line: no-else-after-return
} else {
const index = optionsArray.findIndex((op: any) => {
// tslint:disable-next-line: prefer-template
return new RegExp('^' + op.name + '$').test(control.value)
})
return index < 0 ? { forbiddenNames: { value: control.value } } : null
}
}
}
@Component({
selector: 'ws-app-user-profile',
templateUrl: './user-profile.component.html',
styleUrls: ['./user-profile.component.scss'],
providers: [
{ provide: DateAdapter, useClass: AppDateAdapter },
{ provide: MAT_DATE_FORMATS, useValue: APP_DATE_FORMATS },
],
})
export class UserProfileComponent implements OnInit, OnDestroy {
createUserForm: FormGroup
unseenCtrl!: FormControl
unseenCtrlSub!: Subscription
uploadSaveData = false
selectedIndex = 0
masterNationality: Observable<INation[]> | undefined
countries: INation[] = []
masterLanguages: Observable<ILanguages[]> | undefined
masterKnownLanguages: Observable<ILanguages[]> | undefined
masterNationalities: INation[] = []
masterLanguagesEntries!: ILanguages[]
selectedKnowLangs: ILanguages[] = []
separatorKeysCodes: number[] = [ENTER, COMMA]
public personalInterests: IChipItems[] = []
public selectedHobbies: IChipItems[] = []
ePrimaryEmailType = NsUserProfileDetails.EPrimaryEmailType
eUserGender = NsUserProfileDetails.EUserGender
eMaritalStatus = NsUserProfileDetails.EMaritalStatus
eCategory = NsUserProfileDetails.ECategory
userProfileFields!: NsUserProfileDetails.IUserProfileFields
inReview = 'In Review!'
imageTypes = IMAGE_SUPPORT_TYPES
today = new Date()
phoneNumberPattern = '^((\\+91-?)|0)?[0-9]{10}$'
pincodePattern = '(^[0-9]{6}$)'
yearPattern = '(^[0-9]{4}$)'
namePatern = `^[a-zA-Z\\s\\']{1,32}$`
telephonePattern = `^[0-9]+-?[0-9]+$`
@ViewChild('toastSuccess', { static: true }) toastSuccess!: ElementRef<any>
@ViewChild('toastError', { static: true }) toastError!: ElementRef<any>
@ViewChild('knownLanguagesInput', { static: true }) knownLanguagesInputRef!: ElementRef<HTMLInputElement>
isEditEnabled = false
tncAccepted = false
isOfficialEmail = false
govtOrgMeta!: IGovtOrgMeta
industriesMeta!: IIndustriesMeta
degreesMeta!: IdegreesMeta
designationsMeta!: IdesignationsMeta
public degrees!: FormArray
public postDegrees!: FormArray
public degreeInstitutes = []
public postDegreeInstitutes = []
public countryCodes: string[] = []
showDesignationOther!: boolean
showOrgnameOther!: boolean
showIndustryOther!: boolean
photoUrl!: string | ArrayBuffer | null
isForcedUpdate = false
userProfileData!: any
allDept: any = []
approvalConfig!: NsUserProfileDetails.IApprovals
unApprovedField!: any[]
constructor(
private snackBar: MatSnackBar,
private userProfileSvc: UserProfileService,
private configSvc: ConfigurationsService,
private router: Router,
private route: ActivatedRoute,
private fb: FormBuilder,
private cd: ChangeDetectorRef,
public dialog: MatDialog,
private loader: LoaderService,
) {
this.approvalConfig = this.route.snapshot.data.pageData.data
this.isForcedUpdate = !!this.route.snapshot.paramMap.get('isForcedUpdate')
this.fetchPendingFields()
this.createUserForm = new FormGroup({
firstname: new FormControl('', [Validators.required, Validators.pattern(this.namePatern)]),
middlename: new FormControl('', [Validators.pattern(this.namePatern)]),
surname: new FormControl('', [Validators.required, Validators.pattern(this.namePatern)]),
photo: new FormControl('', []),
countryCode: new FormControl('', [Validators.required]),
mobile: new FormControl('', [Validators.required, Validators.pattern(this.phoneNumberPattern)]),
telephone: new FormControl('', [Validators.pattern(this.telephonePattern)]),
primaryEmail: new FormControl('', [Validators.required, Validators.email]),
primaryEmailType: new FormControl(this.assignPrimaryEmailTypeCheckBox(this.ePrimaryEmailType.OFFICIAL), []),
secondaryEmail: new FormControl('', []),
nationality: new FormControl('', [Validators.required, forbiddenNamesValidator(this.masterNationality)]),
dob: new FormControl('', [Validators.required]),
gender: new FormControl('', [Validators.required]),
maritalStatus: new FormControl('', [Validators.required]),
domicileMedium: new FormControl('', [Validators.required]),
knownLanguages: new FormControl([], []),
residenceAddress: new FormControl('', [Validators.required]),
category: new FormControl('', [Validators.required]),
pincode: new FormControl('', [Validators.required, Validators.pattern(this.pincodePattern)]),
schoolName10: new FormControl('', []),
yop10: new FormControl('', [Validators.pattern(this.yearPattern)]),
schoolName12: new FormControl('', []),
yop12: new FormControl('', [Validators.pattern(this.yearPattern)]),
degrees: this.fb.array([this.createDegree()]),
postDegrees: this.fb.array([this.createDegree()]),
certificationDesc: new FormControl('', []),
interests: new FormControl('', []),
hobbies: new FormControl('', []),
skillAquiredDesc: new FormControl('', []),
isGovtOrg: new FormControl(false, []),
orgName: new FormControl('', []),
orgNameOther: new FormControl('', []),
industry: new FormControl('', []),
industryOther: new FormControl('', []),
designation: new FormControl('', []),
designationOther: new FormControl('', []),
location: new FormControl('', []),
locationOther: new FormControl('', []),
doj: new FormControl('', []),
orgDesc: new FormControl('', []),
payType: new FormControl('', []),
service: new FormControl('', []),
cadre: new FormControl('', []),
allotmentYear: new FormControl('', [Validators.pattern(this.yearPattern)]),
otherDetailsDoj: new FormControl('', []),
civilListNo: new FormControl('', []),
employeeCode: new FormControl('', []),
otherDetailsOfficeAddress: new FormControl('', []),
otherDetailsOfficePinCode: new FormControl('', []),
departmentName: new FormControl('', []),
})
}
ngOnInit() {
// this.unseenCtrlSub = this.createUserForm.valueChanges.subscribe(value => {
// console.log('ngOnInit - value', value);
// })
const approvalData = _.compact(_.map(this.approvalConfig, (v, k) => {
return v.approvalRequired ? { [k]: v } : null
}))
if (approvalData.length > 0) {
// need to call search API
}
this.getUserDetails()
this.fetchMeta()
this.assignPrimaryEmailType(this.isOfficialEmail)
}
fetchMeta() {
this.userProfileSvc.getMasterNationlity().subscribe(
data => {
data.nationalities.map((item: INationality) => {
this.masterNationalities.push({ name: item.name })
this.countries.push({ name: item.name })
this.countryCodes.push(item.countryCode)
})
this.createUserForm.patchValue({
countryCode: this.countryCodes[0],
})
this.onChangesNationality()
},
(_err: any) => {
})
this.userProfileSvc.getMasterLanguages().subscribe(
data => {
this.masterLanguagesEntries = data.languages
this.onChangesLanuage()
this.onChangesKnownLanuage()
},
(_err: any) => {
})
this.userProfileSvc.getProfilePageMeta().subscribe(
data => {
this.govtOrgMeta = data.govtOrg
this.industriesMeta = data.industries
this.degreesMeta = data.degrees
this.designationsMeta = data.designations
},
(_err: any) => {
})
this.userProfileSvc.getAllDepartments().subscribe(
(data: any) => {
this.allDept = data
},
(_err: any) => {
})
}
createDegree(): FormGroup {
return this.fb.group({
degree: new FormControl('', []),
instituteName: new FormControl('', []),
yop: new FormControl('', [Validators.pattern(this.yearPattern)]),
})
}
fetchPendingFields() {
this.userProfileSvc.listApprovalPendingFields().subscribe(res => {
if (res && res.result && res.result.data) {
this.unApprovedField = _.get(res, 'result.data')
}
})
}
isAllowed(name: string) {
if (name && !!this.unApprovedField && this.unApprovedField.length > 0) {
      return this.unApprovedField.indexOf(name) < 0
    }
    return true
}
createDegreeWithValues(degree: any): FormGroup {
return this.fb.group({
degree: new FormControl(degree.degree, []),
instituteName: new FormControl(degree.instituteName, []),
yop: new FormControl(degree.yop, [Validators.pattern(this.yearPattern)]),
})
}
public addDegree() {
this.degrees = this.createUserForm.get('degrees') as FormArray
this.degrees.push(this.createDegree())
}
public addDegreeValues(degree: any) {
this.degrees = this.createUserForm.get('degrees') as FormArray
this.degrees.push(this.createDegreeWithValues(degree))
}
get degreesControls() {
const deg = this.createUserForm.get('degrees')
return (<any>deg)['controls']
}
public removeDegrees(i: number) {
this.degrees.removeAt(i)
}
public addPostDegree() {
this.postDegrees = this.createUserForm.get('postDegrees') as FormArray
this.postDegrees.push(this.createDegree())
}
public addPostDegreeValues(degree: any) {
this.postDegrees = this.createUserForm.get('postDegrees') as FormArray
this.postDegrees.push(this.createDegreeWithValues(degree))
}
get postDegreesControls() {
const deg = this.createUserForm.get('postDegrees')
return (<any>deg)['controls']
}
public removePostDegrees(i: number) {
this.postDegrees.removeAt(i)
}
onChangesNationality(): void {
if (this.createUserForm.get('nationality') != null) {
// tslint:disable-next-line: no-non-null-assertion
this.masterNationality = this.createUserForm.get('nationality')!.valueChanges
.pipe(
debounceTime(500),
distinctUntilChanged(),
startWith(''),
map(value => typeof value === 'string' ? value : (value && value.name ? value.name : '')),
map(name => name ? this.filterNationality(name) : this.masterNationalities.slice())
)
const newLocal = 'nationality'
this.masterNationality.subscribe(event => {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get(newLocal)!.setValidators([Validators.required, forbiddenNamesValidator(event)])
this.createUserForm.updateValueAndValidity()
})
}
}
onChangesLanuage(): void {
// tslint:disable-next-line: no-non-null-assertion
this.masterLanguages = this.createUserForm.get('domicileMedium')!.valueChanges
.pipe(
debounceTime(500),
distinctUntilChanged(),
startWith(''),
map(value => typeof (value) === 'string' ? value : (value && value.name ? value.name : '')),
map(name => name ? this.filterLanguage(name) : this.masterLanguagesEntries.slice())
)
}
onChangesKnownLanuage(): void {
// tslint:disable-next-line: no-non-null-assertion
this.masterKnownLanguages = this.createUserForm.get('knownLanguages')!.valueChanges
.pipe(
debounceTime(500),
distinctUntilChanged(),
startWith(''),
        map(value => (typeof value === 'string' || Array.isArray(value)) ? value : (value && value.name ? value.name : '')),
map(name => {
if (name) {
if (name.constructor === Array) {
return this.filterMultiLanguage(name)
}
return this.filterLanguage(name)
}
return this.masterLanguagesEntries.slice()
})
)
}
private filterNationality(name: string): INation[] {
if (name) {
const filterValue = name.toLowerCase()
return this.masterNationalities.filter(option => option.name.toLowerCase().includes(filterValue))
}
return this.masterNationalities
}
private filterLanguage(name: string): ILanguages[] {
if (name) {
const filterValue = name.toLowerCase()
return this.masterLanguagesEntries.filter(option => option.name.toLowerCase().includes(filterValue))
}
return this.masterLanguagesEntries
}
  private filterMultiLanguage(name: string[]): ILanguages[] {
    if (name) {
      const filterValue = name.map(n => n.toLowerCase())
      // Keep entries whose name contains any of the typed fragments
      return this.masterLanguagesEntries.filter(option =>
        filterValue.some(f => option.name.toLowerCase().includes(f)))
    }
    return this.masterLanguagesEntries
  }
ngOnDestroy() {
if (this.unseenCtrlSub && !this.unseenCtrlSub.closed) {
this.unseenCtrlSub.unsubscribe()
}
}
public selectKnowLanguage(data: any, input: any) {
const value: ILanguages = data.option.value
if (!this.selectedKnowLangs.includes(value)) {
this.selectedKnowLangs.push(data.option.value)
}
if (this.knownLanguagesInputRef && this.knownLanguagesInputRef.nativeElement) {
this.knownLanguagesInputRef.nativeElement.value = ''
}
if (input && input.value) {
input.value = ''
}
// this.knownLanguagesInputRef.nativeElement.value = ''
if (this.createUserForm.get('knownLanguages')) {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get('knownLanguages')!.setValue(null)
}
}
public removeKnowLanguage(lang: any) {
const index = this.selectedKnowLangs.indexOf(lang)
if (index >= 0) {
this.selectedKnowLangs.splice(index, 1)
}
}
add(event: MatChipInputEvent): void {
const input = event.input
const value = event.value as unknown as ILanguages
    // Add the typed value as a known language
if ((value || '')) {
this.selectedKnowLangs.push(value)
}
// Reset the input value
if (input) {
input.value = ''
}
if (this.knownLanguagesInputRef && this.knownLanguagesInputRef.nativeElement) {
this.knownLanguagesInputRef.nativeElement.value = ''
}
if (this.createUserForm.get('knownLanguages')) {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get('knownLanguages')!.setValue(null)
}
}
addPersonalInterests(event: MatChipInputEvent): void {
const input = event.input
const value = event.value as unknown as IChipItems
if ((value || '')) {
this.personalInterests.push(value)
}
if (input) {
input.value = ''
}
// this.knownLanguagesInputRef.nativeElement.value = ''
if (this.createUserForm.get('interests')) {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get('interests')!.setValue(null)
}
}
addHobbies(event: MatChipInputEvent) {
const input = event.input
const value = event.value as unknown as IChipItems
if ((value || '')) {
this.selectedHobbies.push(value)
}
if (input) {
input.value = ''
}
if (this.createUserForm.get('hobbies')) {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get('hobbies')!.setValue(null)
}
}
removePersonalInterests(interest: any) {
const index = this.personalInterests.indexOf(interest)
if (index >= 0) {
this.personalInterests.splice(index, 1)
}
}
removeHobbies(interest: any) {
const index = this.selectedHobbies.indexOf(interest)
if (index >= 0) {
this.selectedHobbies.splice(index, 1)
}
}
getUserDetails() {
if (this.configSvc.profileDetailsStatus) {
this.userProfileSvc.getUserdetailsFromRegistry().subscribe(
(data: any) => {
          const userData = data && data.result && data.result.UserProfile
          if (userData && userData.length) {
const academics = this.populateAcademics(userData[0])
this.setDegreeValuesArray(academics)
this.setPostDegreeValuesArray(academics)
const organisations = this.populateOrganisationDetails(userData[0])
this.constructFormFromRegistry(userData[0], academics, organisations)
this.populateChips(userData[0])
this.userProfileData = userData[0]
} else {
if (this.configSvc.userProfile) {
this.createUserForm.patchValue({
firstname: this.configSvc.userProfile.firstName,
surname: this.configSvc.userProfile.lastName,
primaryEmail: this.configSvc.userProfile.email,
orgName: this.configSvc.userProfile.rootOrgName,
})
}
}
// this.handleFormData(data[0])
},
(_err: any) => {
})
} else {
if (this.configSvc.userProfile) {
this.userProfileSvc.getUserdetails(this.configSvc.userProfile.email).subscribe(
data => {
if (data && data.length) {
this.createUserForm.patchValue({
firstname: data[0].first_name,
surname: data[0].last_name,
primaryEmail: data[0].email,
orgName: data[0].department_name,
})
}
},
() => {
// console.log('err :', err)
})
}
}
}
private populateOrganisationDetails(data: any) {
let org = {
isGovtOrg: true,
orgName: '',
industry: '',
designation: '',
location: '',
responsibilities: '',
doj: '',
orgDesc: '',
completePostalAddress: '',
orgNameOther: '',
industryOther: '',
designationOther: '',
}
if (data && data.professionalDetails && data.professionalDetails.length > 0) {
const organisation = data.professionalDetails[0]
org = {
isGovtOrg: organisation.organisationType,
orgName: organisation.name,
orgNameOther: organisation.nameOther,
industry: organisation.industry,
industryOther: organisation.industryOther,
designation: organisation.designation,
designationOther: organisation.designationOther,
location: organisation.location,
responsibilities: organisation.responsibilities,
doj: this.getDateFromText(organisation.doj),
orgDesc: organisation.description,
completePostalAddress: organisation.completePostalAddress,
}
if (organisation.organisationType === 'Government') {
org.isGovtOrg = true
} else {
org.isGovtOrg = false
}
}
return org
}
private populateAcademics(data: any) {
const academics: NsUserProfileDetails.IAcademics = {
X_STANDARD: {
schoolName10: '',
yop10: '',
},
XII_STANDARD: {
schoolName12: '',
yop12: '',
},
degree: [],
postDegree: [],
}
if (data.academics && Array.isArray(data.academics)) {
data.academics.map((item: any) => {
switch (item.type) {
case 'X_STANDARD': academics.X_STANDARD.schoolName10 = item.nameOfInstitute
academics.X_STANDARD.yop10 = item.yearOfPassing
break
case 'XII_STANDARD': academics.XII_STANDARD.schoolName12 = item.nameOfInstitute
academics.XII_STANDARD.yop12 = item.yearOfPassing
break
case 'GRADUATE': academics.degree.push({
degree: item.nameOfQualification,
instituteName: item.nameOfInstitute,
yop: item.yearOfPassing,
})
break
case 'POSTGRADUATE': academics.postDegree.push({
degree: item.nameOfQualification,
instituteName: item.nameOfInstitute,
yop: item.yearOfPassing,
})
break
}
})
}
return academics
}
private populateChips(data: any) {
if (data.personalDetails.knownLanguages && data.personalDetails.knownLanguages.length) {
data.personalDetails.knownLanguages.map((lang: ILanguages) => {
if (lang) {
this.selectedKnowLangs.push(lang)
}
})
}
if (data.interests && data.interests.professional && data.interests.professional.length) {
data.interests.professional.map((interest: IChipItems) => {
if (interest) {
this.personalInterests.push(interest)
}
})
}
if (data.interests && data.interests.hobbies && data.interests.hobbies.length) {
data.interests.hobbies.map((interest: IChipItems) => {
if (interest) {
this.selectedHobbies.push(interest)
}
})
}
}
private filterPrimaryEmailType(data: any) {
if (data.personalDetails.officialEmail) {
this.isOfficialEmail = true
} else {
this.isOfficialEmail = false
}
// this.cd.detectChanges()
return this.ePrimaryEmailType.OFFICIAL
// this.assignPrimaryEmailTypeCheckBox(this.ePrimaryEmailType.PERSONAL)
// return this.ePrimaryEmailType.PERSONAL
}
private constructFormFromRegistry(data: any, academics: NsUserProfileDetails.IAcademics, organisation: any) {
/* tslint:disable */
this.createUserForm.patchValue({
firstname: data.personalDetails.firstname,
middlename: data.personalDetails.middlename,
surname: data.personalDetails.surname,
photo: data.photo,
dob: this.getDateFromText(data.personalDetails.dob),
nationality: data.personalDetails.nationality,
domicileMedium: data.personalDetails.domicileMedium,
gender: data.personalDetails.gender,
maritalStatus: data.personalDetails.maritalStatus,
category: data.personalDetails.category,
knownLanguages: data.personalDetails.knownLanguages,
countryCode: data.personalDetails.countryCode,
mobile: data.personalDetails.mobile,
telephone: this.checkvalue(data.personalDetails.telephone),
primaryEmail: data.personalDetails.primaryEmail || '',
secondaryEmail: data.personalDetails.personalEmail,
primaryEmailType: this.filterPrimaryEmailType(data),
residenceAddress: data.personalDetails.postalAddress,
pincode: data.personalDetails.pincode,
schoolName10: academics.X_STANDARD.schoolName10,
yop10: academics.X_STANDARD.yop10,
schoolName12: academics.XII_STANDARD.schoolName12,
yop12: academics.XII_STANDARD.yop12,
isGovtOrg: organisation.isGovtOrg,
// orgName: organisation.orgName,
industry: organisation.industry,
designation: organisation.designation,
location: organisation.location,
doj: organisation.doj,
orgDesc: organisation.orgDesc,
orgNameOther: organisation.orgNameOther,
industryOther: organisation.industryOther,
designationOther: organisation.designationOther,
orgName: _.get(data, 'employmentDetails.departmentName') || '',
service: _.get(data, 'employmentDetails.service') || '',
cadre: _.get(data, 'employmentDetails.cadre') || '',
allotmentYear: this.checkvalue(_.get(data, 'employmentDetails.allotmentYearOfService') || ''),
otherDetailsDoj: this.getDateFromText(_.get(data, 'employmentDetails.dojOfService') || ''),
payType: _.get(data, 'employmentDetails.payType') || '',
civilListNo: _.get(data, 'employmentDetails.civilListNo') || '',
employeeCode: this.checkvalue(_.get(data, 'employmentDetails.employeeCode') || ''),
otherDetailsOfficeAddress: this.checkvalue(_.get(data, 'employmentDetails.officialPostalAddress') || ''),
otherDetailsOfficePinCode: this.checkvalue(_.get(data, 'employmentDetails.pinCode') || ''),
skillAquiredDesc: _.get(data, 'skills.additionalSkills') || '',
certificationDesc: _.get(data, 'skills.certificateDetails') || '',
},
{
emitEvent: true,
})
/* tslint:enable */
this.cd.detectChanges()
this.cd.markForCheck()
this.setDropDownOther(organisation)
this.setProfilePhotoValue(data)
}
  checkvalue(value: any) {
    if (value === 'undefined') {
      return ''
    }
    return value
  }
setProfilePhotoValue(data: any) {
this.photoUrl = data.photo || undefined
}
setDropDownOther(organisation?: any) {
if (organisation.designation === 'Other') {
this.showDesignationOther = true
}
if (organisation.orgName === 'Other') {
this.showOrgnameOther = true
}
if (organisation.industry === 'Other') {
this.showIndustryOther = true
}
}
private setDegreeValuesArray(academics: any) {
this.degrees = this.createUserForm.get('degrees') as FormArray
this.degrees.removeAt(0)
academics.degree.map((degree: any) => { this.addDegreeValues(degree as FormArray) })
}
private setPostDegreeValuesArray(academics: any) {
this.postDegrees = this.createUserForm.get('postDegrees') as FormArray
this.postDegrees.removeAt(0)
academics.postDegree.map((degree: any) => { this.addPostDegreeValues(degree as FormArray) })
}
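  // Builds the registry update payload from the form; approval-gated fields
  // are withheld from the direct update and returned separately as
  // `approvalData` for the workflow service.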
private constructReq(form: any) {
const userid = this.userProfileData.userId || this.userProfileData.id
const profileReq = {
id: userid,
userId: userid,
photo: form.value.photo,
personalDetails: {
firstname: form.value.firstname,
middlename: form.value.middlename,
surname: form.value.surname,
dob: form.value.dob,
nationality: form.value.nationality,
domicileMedium: form.value.domicileMedium,
gender: form.value.gender,
maritalStatus: form.value.maritalStatus,
category: form.value.category,
knownLanguages: form.value.knownLanguages,
countryCode: form.value.countryCode,
mobile: form.value.mobile,
        telephone: form.value.telephone ? `${form.value.telephone}` : '',
primaryEmail: form.value.primaryEmail,
officialEmail: '',
personalEmail: '',
postalAddress: form.value.residenceAddress,
pincode: form.value.pincode,
},
academics: this.getAcademics(form),
employmentDetails: {
service: form.value.service,
cadre: form.value.cadre,
allotmentYearOfService: form.value.allotmentYear,
dojOfService: form.value.otherDetailsDoj,
payType: form.value.payType,
civilListNo: form.value.civilListNo,
employeeCode: form.value.employeeCode,
officialPostalAddress: form.value.otherDetailsOfficeAddress,
pinCode: form.value.otherDetailsOfficePinCode,
departmentName: form.value.orgName || form.value.orgNameOther || '',
},
professionalDetails: [
...this.getOrganisationsHistory(form),
],
skills: {
additionalSkills: form.value.skillAquiredDesc,
certificateDetails: form.value.certificationDesc,
},
interests: {
professional: form.value.interests,
hobbies: form.value.hobbies,
},
}
if (form.value.primaryEmailType === this.ePrimaryEmailType.OFFICIAL) {
profileReq.personalDetails.officialEmail = form.value.primaryEmail
} else {
profileReq.personalDetails.officialEmail = ''
}
profileReq.personalDetails.personalEmail = form.value.secondaryEmail
let approvalData
_.forOwn(this.approvalConfig, (v, k) => {
if (!v.approvalRequired) {
_.set(profileReq, k, this.getDataforK(k, form))
} else {
_.set(profileReq, k, this.getDataforKRemove(k, v.approvalFiels, form))
approvalData = this.getDataforKAdd(k, v.approvalFiels, form)
}
})
return { profileReq, approvalData }
}
private getDataforK(k: string, form: any) {
switch (k) {
case 'personalDetails':
let officeEmail = ''
let personalEmail = ''
if (form.value.primaryEmailType === this.ePrimaryEmailType.OFFICIAL) {
officeEmail = form.value.primaryEmail
} else {
officeEmail = ''
}
personalEmail = form.value.secondaryEmail
return {
personalEmail,
firstname: form.value.firstname,
middlename: form.value.middlename,
surname: form.value.surname,
dob: form.value.dob,
nationality: form.value.nationality,
domicileMedium: form.value.domicileMedium,
gender: form.value.gender,
maritalStatus: form.value.maritalStatus,
category: form.value.category,
knownLanguages: form.value.knownLanguages,
countryCode: form.value.countryCode,
mobile: form.value.mobile,
          telephone: form.value.telephone ? `${form.value.telephone}` : '',
primaryEmail: form.value.primaryEmail,
officialEmail: officeEmail,
postalAddress: form.value.residenceAddress,
pincode: form.value.pincode,
osid: _.get(this.userProfileData, 'personalDetails.osid') || undefined,
}
case 'academics':
return this.getAcademics(form)
case 'employmentDetails':
return {
service: form.value.service,
cadre: form.value.cadre,
allotmentYearOfService: form.value.allotmentYear,
dojOfService: form.value.otherDetailsDoj || undefined,
payType: form.value.payType,
civilListNo: form.value.civilListNo,
employeeCode: form.value.employeeCode,
officialPostalAddress: form.value.otherDetailsOfficeAddress,
pinCode: form.value.otherDetailsOfficePinCode,
departmentName: form.value.orgName || form.value.orgNameOther || '',
osid: _.get(this.userProfileData, 'employmentDetails.osid') || undefined,
}
case 'professionalDetails':
return [
...this.getOrganisationsHistory(form),
]
case 'skills':
return {
additionalSkills: form.value.skillAquiredDesc,
certificateDetails: form.value.certificationDesc,
}
case 'interests':
return {
professional: form.value.interests,
hobbies: form.value.hobbies,
}
default:
return undefined
}
}
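  // For approval-gated fields: getDataforKRemove keeps the previously stored
  // values in the direct update, while getDataforKAdd collects the changed
  // values as from/to pairs for the approval request.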
private getDataforKRemove(k: string, fields: string[], form: any) {
const datak = this.getDataforK(k, form)
_.each(datak, (dk, idx) => {
      for (let i = 0; i < fields.length && dk; i += 1) {
const oldVal = _.get(this.userProfileData, `${k}[${idx}].${fields[i]}`)
const newVal = _.get(dk, `${fields[i]}`)
if (oldVal !== newVal) {
_.set(dk, fields[i], oldVal)
}
}
})
return datak
}
private getDataforKAdd(k: string, fields: string[], form: any) {
const datak = this.getDataforK(k, form)
const lst: any = []
_.each(datak, (dk, idx) => {
      for (let i = 0; i < fields.length && dk; i += 1) {
const oldVal = _.get(this.userProfileData, `${k}[${idx}].${fields[i]}`)
const newVal = _.get(dk, `${fields[i]}`)
if ((oldVal !== newVal) && dk && _.get(dk, fields[i]) && typeof (_.get(dk, fields[i])) !== 'object') {
lst.push({
fieldKey: k,
fromValue: { [fields[i]]: oldVal || '' },
toValue: { [fields[i]]: newVal || '' },
osid: _.get(this.userProfileData, `${k}[${idx}].osid`),
})
}
}
})
return lst
}
private getOrganisationsHistory(form: any) {
const organisations: any = []
const org = {
organisationType: '',
name: form.value.orgName,
nameOther: form.value.orgNameOther,
industry: form.value.industry,
industryOther: form.value.industryOther,
designation: form.value.designation,
designationOther: form.value.designationOther,
location: form.value.location,
responsibilities: '',
doj: form.value.doj,
description: form.value.orgDesc,
completePostalAddress: '',
additionalAttributes: {},
osid: _.get(this.userProfileData, 'professionalDetails[0].osid') || undefined,
}
if (form.value.isGovtOrg) {
org.organisationType = 'Government'
} else {
org.organisationType = 'Non-Government'
}
organisations.push(org)
return organisations
}
private getAcademics(form: any) {
const academics = []
academics.push(this.getClass10(form))
academics.push(this.getClass12(form))
academics.push(...this.getDegree(form, 'GRADUATE'))
academics.push(...this.getPostDegree(form, 'POSTGRADUATE'))
return academics
}
getClass10(form: any): IProfileAcademics {
return ({
nameOfQualification: '',
type: 'X_STANDARD',
nameOfInstitute: form.value.schoolName10,
yearOfPassing: `${form.value.yop10}`,
})
}
getClass12(form: any): IProfileAcademics {
return ({
nameOfQualification: '',
type: 'XII_STANDARD',
nameOfInstitute: form.value.schoolName12,
yearOfPassing: `${form.value.yop12}`,
})
}
getDegree(form: any, degreeType: string): IProfileAcademics[] {
const formatedDegrees: IProfileAcademics[] = []
form.value.degrees.map((degree: any) => {
formatedDegrees.push({
nameOfQualification: degree.degree,
type: degreeType,
nameOfInstitute: degree.instituteName,
yearOfPassing: `${degree.yop}`,
})
})
return formatedDegrees
}
getPostDegree(form: any, degreeType: string): IProfileAcademics[] {
const formatedDegrees: IProfileAcademics[] = []
form.value.postDegrees.map((degree: any) => {
formatedDegrees.push({
nameOfQualification: degree.degree,
type: degreeType,
nameOfInstitute: degree.instituteName,
yearOfPassing: `${degree.yop}`,
})
})
return formatedDegrees
}
async onSubmit(form: any) {
// DO some customization on the input data
form.value.knownLanguages = this.selectedKnowLangs
form.value.interests = this.personalInterests
form.value.hobbies = this.selectedHobbies
form.value.dob = changeformat(new Date(`${form.value.dob}`))
form.value.allotmentYear = `${form.value.allotmentYear}`
form.value.civilListNo = `${form.value.civilListNo}`
form.value.employeeCode = `${form.value.employeeCode}`
form.value.otherDetailsOfficePinCode = `${form.value.otherDetailsOfficePinCode}`
if (form.value.otherDetailsDoj) {
form.value.otherDetailsDoj = changeformat(new Date(`${form.value.otherDetailsDoj}`))
}
if (form.value.doj) {
form.value.doj = changeformat(new Date(`${form.value.doj}`))
}
this.uploadSaveData = true
// Construct the request structure for open saber
const profileRequest = this.constructReq(form)
let appdata = [] as any
appdata = profileRequest.approvalData !== undefined ? profileRequest.approvalData : []
this.userProfileSvc.updateProfileDetails(profileRequest.profileReq).subscribe(
() => {
if (appdata !== undefined && appdata.length > 0) {
if (this.configSvc.userProfile) {
this.userProfileSvc.getUserdetailsFromRegistry().subscribe(
(data: any) => {
const dat = data.result.UserProfile[0]
if (dat) {
                const academics = this.populateAcademics(dat)
this.setDegreeValuesArray(academics)
this.setPostDegreeValuesArray(academics)
// const organisations = this.populateOrganisationDetails(data[0])
// this.constructFormFromRegistry(data[0], academics, organisations)
this.populateChips(dat)
this.userProfileData = dat
let deptNameValue = ''
if (this.userProfileData && this.userProfileData.professionalDetails
&& this.userProfileData.professionalDetails.length > 0) {
deptNameValue = form.value.orgName || form.value.orgNameOther || ''
}
const profDetails = {
state: 'INITIATE',
action: 'INITIATE',
userId: this.userProfileData.userId,
applicationId: this.userProfileData.userId,
actorUserId: this.userProfileData.userId,
serviceName: 'profile',
comment: '',
wfId: '',
deptName: deptNameValue,
updateFieldValues: profileRequest.approvalData,
}
if (deptNameValue && (profDetails.updateFieldValues || []).length > 0) {
this.userProfileSvc.approveRequest(profDetails).subscribe(() => {
form.reset()
this.uploadSaveData = false
this.configSvc.profileDetailsStatus = true
this.openSnackbar(this.toastSuccess.nativeElement.value)
if (!this.isForcedUpdate && this.userProfileData) {
this.router.navigate(['/app/person-profile', (this.userProfileData.userId || this.userProfileData.id)])
} else {
this.router.navigate(['page', 'home'])
}
}
,
// tslint:disable-next-line:align
() => {
this.openSnackbar(this.toastError.nativeElement.value)
this.uploadSaveData = false
})
} else {
this.uploadSaveData = false
this.configSvc.profileDetailsStatus = true
this.openSnackbar(this.toastSuccess.nativeElement.value)
if (!this.isForcedUpdate && this.userProfileData) {
// const organisations = this.populateOrganisationDetails(data[0])
// this.constructFormFromRegistry(data[0], academics, organisations)
this.router.navigate(['/app/person-profile', (this.userProfileData.userId || this.userProfileData.id)])
} else {
this.router.navigate(['page', 'home'])
}
}
} else {
form.reset()
this.uploadSaveData = false
this.configSvc.profileDetailsStatus = true
this.openSnackbar(this.toastSuccess.nativeElement.value)
if (!this.isForcedUpdate && this.userProfileData) {
this.router.navigate(['/app/person-profile', (this.userProfileData.userId || this.userProfileData.id)])
} else {
this.router.navigate(['page', 'home'])
}
}
// this.handleFormData(data[0])
},
(_err: any) => {
})
}
} else {
form.reset()
this.uploadSaveData = false
this.configSvc.profileDetailsStatus = true
this.openSnackbar(this.toastSuccess.nativeElement.value)
if (!this.isForcedUpdate && this.userProfileData) {
this.router.navigate(['/app/person-profile', (this.userProfileData.userId || this.userProfileData.id)])
} else {
this.router.navigate(['page', 'home'])
}
}
}
,
() => {
this.openSnackbar(this.toastError.nativeElement.value)
this.uploadSaveData = false
})
}
private openSnackbar(primaryMsg: string, duration: number = 5000) {
this.snackBar.open(primaryMsg, 'X', {
duration,
})
}
formNext() {
if (this.selectedIndex === 3) {
this.selectedIndex = 0
} else {
this.selectedIndex = this.selectedIndex + 1
}
}
public navigateBack() {
this.router.navigate(['page', 'home'])
}
public officialEmailCheck() {
this.isOfficialEmail = !this.isOfficialEmail
this.assignPrimaryEmailType(this.isOfficialEmail)
}
private assignPrimaryEmailType(isOfficialEmail: boolean) {
if (isOfficialEmail) {
this.createUserForm.patchValue({
primaryEmailType: this.ePrimaryEmailType.OFFICIAL,
})
} else {
this.createUserForm.patchValue({
primaryEmailType: this.ePrimaryEmailType.PERSONAL,
})
}
}
private assignPrimaryEmailTypeCheckBox(primaryEmailType: any) {
if (primaryEmailType === this.ePrimaryEmailType.OFFICIAL) {
this.isOfficialEmail = true
} else {
this.isOfficialEmail = false
}
// this.assignPrimaryEmailType(this.isOfficialEmail)
}
private getDateFromText(dateString: string): any {
if (dateString) {
const splitValues: string[] = dateString.split('-')
const [dd, mm, yyyy] = splitValues
const dateToBeConverted = `${yyyy}-${mm}-${dd}`
return new Date(dateToBeConverted)
}
return ''
}
otherDropDownChange(value: any, field: string) {
if (field === 'orgname' && value !== 'Other') {
this.showOrgnameOther = false
this.createUserForm.controls['orgNameOther'].setValue('')
}
if (field === 'industry' && value !== 'Other') {
this.showIndustryOther = false
this.createUserForm.controls['industryOther'].setValue('')
}
if (field === 'designation' && value !== 'Other') {
this.showDesignationOther = false
this.createUserForm.controls['designationOther'].setValue('')
}
}
uploadProfileImg(file: File) {
const formdata = new FormData()
const fileName = file.name.replace(/[^A-Za-z0-9.]/g, '')
if (
!(
IMAGE_SUPPORT_TYPES.indexOf(
`.${fileName
.toLowerCase()
.split('.')
.pop()}`,
) > -1
)
) {
this.snackBar.openFromComponent(NotificationComponent, {
data: {
type: Notify.INVALID_FORMAT,
},
duration: NOTIFICATION_TIME * 1000,
})
return
}
if (file.size > IMAGE_MAX_SIZE) {
this.snackBar.openFromComponent(NotificationComponent, {
data: {
type: Notify.SIZE_ERROR,
},
duration: NOTIFICATION_TIME * 1000,
})
return
}
const dialogRef = this.dialog.open(ImageCropComponent, {
width: '70%',
data: {
isRoundCrop: true,
imageFile: file,
width: 265,
height: 150,
isThumbnail: true,
imageFileName: fileName,
},
})
dialogRef.afterClosed().subscribe({
next: (result: File) => {
if (result) {
formdata.append('content', result, fileName)
this.loader.changeLoad.next(true)
const reader = new FileReader()
reader.readAsDataURL(result)
reader.onload = _event => {
this.photoUrl = reader.result
            if (this.createUserForm.get('photo')) {
// tslint:disable-next-line: no-non-null-assertion
this.createUserForm.get('photo')!.setValue(this.photoUrl)
}
}
}
},
})
}
}
| forbiddenNamesValidator |
health_inspector.py | import uuid
import json
import health_inspector
g_client = None
CATEGORY_WORKER = 4
HEALTH_INSPECTOR_MODULE_ID = uuid.UUID('4e5f74d0-4705-11ec-abd0-e12370ec4fc6')
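# Module metadata reported via getinfo(): worker category and a fixed UUID
# identifying this module to the host framework.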
def init(client, **kwargs):
"""
:param client:
:param kwargs:
:return:
"""
global g_client
g_client = client
return True
def run(message, **kwargs):
"""
:param bytes message:
:param kwargs:
:return bytes or None: None if post will happen asynchronously
"""
#message_dict = json.loads(message.decode('utf-8'))
result = health_inspector.main()
result = "\n".join(result)
result = ''.join([c for c in result if ord(c) > 31 or ord(c) == 9])
    # Turn into bytes
    message = result.encode('utf-8')
    return message
def getinfo():
"""
:return:
"""
return { "type": CATEGORY_WORKER, "version" : {"major": 2, "minor": 0}, "id" : HEALTH_INSPECTOR_MODULE_ID}
def | (**kwargs):
"""
:param kwargs:
:return:
"""
return True
| deinit |
libra_client.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{bail, ensure, Result};
use libra_json_rpc_client::async_client::{
types as jsonrpc, Client, Retry, WaitForTransactionError,
};
use libra_logger::prelude::info;
use libra_types::{
access_path::AccessPath,
account_address::AccountAddress,
account_config::{ACCOUNT_RECEIVED_EVENT_PATH, ACCOUNT_SENT_EVENT_PATH},
account_state_blob::AccountStateBlob,
epoch_change::EpochChangeProof,
ledger_info::LedgerInfoWithSignatures,
transaction::{SignedTransaction, Version},
trusted_state::{TrustedState, TrustedStateChange},
waypoint::Waypoint,
};
use reqwest::Url;
use std::time::Duration;
/// A client connection to an AdmissionControl (AC) service. `LibraClient` also
/// handles verifying the server's responses, retrying on non-fatal failures, and
/// ratcheting our latest verified state, which includes the latest verified
/// version and latest verified epoch change ledger info.
///
/// ### Note
///
/// `LibraClient` will reject out-of-date responses. For example, this can happen if
///
/// 1. We make a request to the remote AC service.
/// 2. The remote service crashes and it forgets the most recent state or an
/// out-of-date replica takes its place.
/// 3. We make another request to the remote AC service. In this case, the remote
/// AC will be behind us and we will reject their response as stale.
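///
/// ### Example
///
/// A minimal construction sketch; the endpoint URL and waypoint string are
/// hypothetical placeholders:
///
/// ```ignore
/// use std::str::FromStr;
///
/// let url = Url::parse("http://localhost:8080/v1")?;
/// let waypoint = Waypoint::from_str("0:4d2d2a...")?;
/// let mut client = LibraClient::new(url, waypoint)?;
/// client.update_and_verify_state_proof()?;
/// ```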
pub struct LibraClient {
client: Client<Retry>,
/// The latest verified chain state.
trusted_state: TrustedState,
/// The most recent epoch change ledger info. This is `None` if we only know
/// about our local [`Waypoint`] and have not yet ratcheted to the remote's
/// latest state.
latest_epoch_change_li: Option<LedgerInfoWithSignatures>,
runtime: libra_infallible::Mutex<tokio::runtime::Runtime>,
}
impl LibraClient {
/// Construct a new Client instance.
pub fn new(url: Url, waypoint: Waypoint) -> Result<Self> {
let initial_trusted_state = TrustedState::from(waypoint);
let client = Client::from_url(url, Retry::default())?;
Ok(LibraClient {
runtime: libra_infallible::Mutex::new(
tokio::runtime::Builder::new()
.thread_name("cli-client")
.threaded_scheduler()
.enable_all()
.build()
.expect("failed to create runtime"),
),
client,
trusted_state: initial_trusted_state,
latest_epoch_change_li: None,
})
}
/// Submits a transaction and bumps the sequence number for the sender, pass in `None` for
/// sender_account if sender's address is not managed by the client.
pub fn submit_transaction(&self, transaction: &SignedTransaction) -> Result<()> {
self.runtime
.lock()
.block_on(self.client.submit(transaction))
.map_err(anyhow::Error::new)
.map(|r| r.result)
}
/// Retrieves account information
/// - If `with_state_proof`, will also retrieve state proof from node and update trusted_state accordingly
pub fn get_account(&self, account: &AccountAddress) -> Result<Option<jsonrpc::Account>> {
self.runtime
.lock()
.block_on(self.client.get_account(account))
.map_err(anyhow::Error::new)
.map(|r| r.result)
}
pub fn get_account_state_blob(
&self,
account: &AccountAddress,
) -> Result<(Option<AccountStateBlob>, Version)> {
let ret = self
.runtime
.lock()
.block_on(
self.client
.get_account_state_with_proof(account, None, None),
)
.map(|r| r.result)
.map_err(anyhow::Error::new)?;
if !ret.blob.is_empty() {
Ok((Some(lcs::from_bytes(&hex::decode(ret.blob)?)?), ret.version))
} else {
Ok((None, ret.version))
}
}
pub fn get_events(
&self,
event_key: &str,
start: u64,
limit: u64,
) -> Result<Vec<jsonrpc::Event>> {
self.runtime
.lock()
.block_on(self.client.get_events(event_key, start, limit))
.map(|r| r.result)
.map_err(anyhow::Error::new)
}
pub fn wait_for_transaction(
&self,
txn: &SignedTransaction,
timeout: Duration,
) -> Result<jsonrpc::Transaction, WaitForTransactionError> {
self.runtime
.lock()
.block_on(
self.client
.wait_for_signed_transaction(txn, Some(timeout), None),
)
.map(|r| r.result)
}
/// Gets the block metadata
pub fn get_metadata(&self) -> Result<jsonrpc::Metadata> {
self.runtime
.lock()
.block_on(self.client.get_metadata())
.map(|r| r.result)
.map_err(anyhow::Error::new)
}
/// Gets the currency info stored on-chain
pub fn get_currency_info(&self) -> Result<Vec<jsonrpc::CurrencyInfo>> {
self.runtime
.lock()
.block_on(self.client.get_currencies())
.map(|r| r.result)
.map_err(anyhow::Error::new)
}
/// Retrieves and checks the state proof
pub fn update_and_verify_state_proof(&mut self) -> Result<()> {
let state_proof = self
.runtime
.lock()
.block_on(
self.client
.get_state_proof(self.trusted_state().latest_version()),
)
.map(|r| r.result)
.map_err(anyhow::Error::new)?;
self.verify_state_proof(state_proof)
}
fn verify_state_proof(&mut self, state_proof: jsonrpc::StateProof) -> Result<()> {
let state = self.trusted_state();
let li: LedgerInfoWithSignatures =
lcs::from_bytes(&hex::decode(state_proof.ledger_info_with_signatures)?)?;
let epoch_change_proof: EpochChangeProof =
lcs::from_bytes(&hex::decode(state_proof.epoch_change_proof)?)?;
// check ledger info version
ensure!(
li.ledger_info().version() >= state.latest_version(),
"Got stale ledger_info with version {}, known version: {}",
li.ledger_info().version(),
state.latest_version(),
);
// trusted_state_change
match state.verify_and_ratchet(&li, &epoch_change_proof)? {
TrustedStateChange::Epoch {
new_state,
latest_epoch_change_li,
} => {
info!(
"Verified epoch changed to {}",
latest_epoch_change_li
.ledger_info()
.next_epoch_state()
.expect("no validator set in epoch change ledger info"),
);
// Update client state
self.update_trusted_state(new_state);
self.update_latest_epoch_change_li(latest_epoch_change_li.clone());
}
TrustedStateChange::Version { new_state } => {
if state.latest_version() < new_state.latest_version() {
info!("Verified version change to: {}", new_state.latest_version());
}
self.update_trusted_state(new_state);
}
TrustedStateChange::NoChange => (),
}
Ok(())
}
/// LedgerInfo corresponding to the latest epoch change.
pub(crate) fn latest_epoch_change_li(&self) -> Option<&LedgerInfoWithSignatures> {
self.latest_epoch_change_li.as_ref()
}
/// Latest trusted state
pub(crate) fn trusted_state(&self) -> TrustedState {
self.trusted_state.clone()
}
fn update_latest_epoch_change_li(&mut self, ledger: LedgerInfoWithSignatures) {
self.latest_epoch_change_li = Some(ledger);
}
fn update_trusted_state(&mut self, state: TrustedState) {
self.trusted_state = state
}
/// Get transaction from validator by account and sequence number.
pub fn get_txn_by_acc_seq(
&self,
account: &AccountAddress,
sequence_number: u64,
fetch_events: bool,
) -> Result<Option<jsonrpc::Transaction>> {
self.runtime
.lock()
.block_on(
self.client
.get_account_transaction(&account, sequence_number, fetch_events),
)
.map(|r| r.result)
.map_err(anyhow::Error::new)
}
/// Get transactions in range (start_version..start_version + limit - 1) from validator.
pub fn | (
&self,
start_version: u64,
limit: u64,
fetch_events: bool,
) -> Result<Vec<jsonrpc::Transaction>> {
self.runtime
.lock()
.block_on(
self.client
.get_transactions(start_version, limit, fetch_events),
)
.map(|r| r.result)
.map_err(anyhow::Error::new)
}
pub fn get_events_by_access_path(
&self,
access_path: AccessPath,
start_event_seq_num: u64,
limit: u64,
) -> Result<(Vec<jsonrpc::Event>, jsonrpc::Account)> {
// get event key from access_path
match self.get_account(&access_path.address)? {
None => bail!("No account found for address {:?}", access_path.address),
Some(account_view) => {
let path = access_path.path;
let event_key = if path == ACCOUNT_SENT_EVENT_PATH.to_vec() {
&account_view.sent_events_key
} else if path == ACCOUNT_RECEIVED_EVENT_PATH.to_vec() {
&account_view.received_events_key
} else {
bail!("Unexpected event path found in access path");
};
// get_events
let events = self.get_events(event_key, start_event_seq_num, limit)?;
Ok((events, account_view))
}
}
}
}
| get_txn_by_range |
test_config_spec.py | import pytest
from dagster import DagsterInvalidConfigDefinitionError, Noneable, Selector, execute_solid, solid
def test_kitchen_sink():
@solid(
config_schema={
'str_field': str,
'int_field': int,
'list_int': [int],
'list_list_int': [[int]],
'dict_field': {'a_string': str},
'list_dict_field': [{'an_int': int}],
'selector_of_things': Selector(
{'select_list_dict_field': [{'an_int': int}], 'select_int': int}
), | }
)
def kitchen_sink(context):
return context.solid_config
solid_config_one = {
'str_field': 'kjf',
'int_field': 2,
'list_int': [3],
'list_list_int': [[1], [2, 3]],
'dict_field': {'a_string': 'kdjfkd'},
'list_dict_field': [{'an_int': 2}, {'an_int': 4}],
'selector_of_things': {'select_int': 3},
'optional_list_of_optional_string': ['foo', None],
}
assert (
execute_solid(
kitchen_sink, run_config={'solids': {'kitchen_sink': {'config': solid_config_one}}},
).output_value()
== solid_config_one
)
solid_config_two = {
'str_field': 'kjf',
'int_field': 2,
'list_int': [3],
'list_list_int': [[1], [2, 3]],
'dict_field': {'a_string': 'kdjfkd'},
'list_dict_field': [{'an_int': 2}, {'an_int': 4}],
'selector_of_things': {'select_list_dict_field': [{'an_int': 5}]},
'optional_list_of_optional_string': None,
}
assert (
execute_solid(
kitchen_sink, run_config={'solids': {'kitchen_sink': {'config': solid_config_two}}},
).output_value()
== solid_config_two
)
def test_bad_solid_config_argument():
with pytest.raises(DagsterInvalidConfigDefinitionError) as exc_info:
@solid(config='dkjfkd')
def _bad_config(_):
pass
assert str(exc_info.value).startswith(
"Error defining config. Original value passed: 'dkjfkd'. 'dkjfkd' cannot be resolved."
)
def test_bad_solid_config_argument_nested():
with pytest.raises(DagsterInvalidConfigDefinitionError) as exc_info:
@solid(config={'field': 'kdjkfjd'})
def _bad_config(_):
pass
assert str(exc_info.value).startswith(
"Error defining config. Original value passed: {'field': 'kdjkfjd'}. "
"Error at stack path :field. 'kdjkfjd' cannot be resolved."
)
def test_bad_solid_config_argument_list_wrong_length():
with pytest.raises(DagsterInvalidConfigDefinitionError) as exc_info:
@solid(config={'bad_list': []})
def _bad_list_config(_):
pass
assert str(exc_info.value).startswith(
"Error defining config. Original value passed: {'bad_list': []}. "
"Error at stack path :bad_list. [] cannot be resolved. "
"Reason: List must be of length 1."
)
def test_bad_solid_config_argument_list_bad_item():
with pytest.raises(DagsterInvalidConfigDefinitionError) as exc_info:
@solid(config={'bad_list': ['kdjfkd']})
def _bad_list_config(_):
pass
assert str(exc_info.value).startswith(
"Error defining config. Original value passed: {'bad_list': ['kdjfkd']}. "
"Error at stack path :bad_list. ['kdjfkd'] cannot be resolved. "
"Reason: List have a single item and contain a valid type i.e. [int]. "
"Got item 'kdjfkd'."
)
def test_bad_solid_config_argument_list_bad_nested_item():
with pytest.raises(DagsterInvalidConfigDefinitionError) as exc_info:
@solid(config={'bad_nested_list': [{'bad_field': 'kjdkfd'}]})
def _bad_list_config(_):
pass
assert str(exc_info.value).startswith(
"Error defining config. Original value passed: {'bad_nested_list': "
"[{'bad_field': 'kjdkfd'}]}. Error at stack path "
":bad_nested_list:bad_field. 'kjdkfd' cannot be resolved."
) | # this is a good argument to use () instead of [] for type parameterization in
# the config system
'optional_list_of_optional_string': Noneable([Noneable(str)]), |
helpers.rs | use clap::ArgMatches;
use hex;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use types::Address;
pub fn time_now() -> Result<u64, String> {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|duration| duration.as_secs())
.map_err(|e| format!("Unable to get time: {:?}", e))
}
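// Illustrative: time_now() yields Ok(seconds since the Unix epoch), e.g. Ok(1_600_000_000).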
pub fn parse_path_with_default_in_home_dir(
matches: &ArgMatches,
name: &'static str,
default: PathBuf,
) -> Result<PathBuf, String> |
pub fn parse_u64(matches: &ArgMatches, name: &'static str) -> Result<u64, String> {
matches
.value_of(name)
.ok_or_else(|| format!("{} not specified", name))?
.parse::<u64>()
.map_err(|e| format!("Unable to parse {}: {}", name, e))
}
pub fn parse_u64_opt(matches: &ArgMatches, name: &'static str) -> Result<Option<u64>, String> {
matches
.value_of(name)
.map(|val| {
val.parse::<u64>()
.map_err(|e| format!("Unable to parse {}: {}", name, e))
})
.transpose()
}
pub fn parse_address(matches: &ArgMatches, name: &'static str) -> Result<Address, String> {
matches
.value_of(name)
.ok_or_else(|| format!("{} not specified", name))
.and_then(|val| {
if val.starts_with("0x") {
val[2..]
.parse()
.map_err(|e| format!("Unable to parse {}: {:?}", name, e))
} else {
Err(format!("Unable to parse {}, must have 0x prefix", name))
}
})
}
pub fn parse_fork_opt(matches: &ArgMatches, name: &'static str) -> Result<Option<[u8; 4]>, String> {
matches
.value_of(name)
.map(|val| {
if val.starts_with("0x") {
let vec = hex::decode(&val[2..])
.map_err(|e| format!("Unable to parse {} as hex: {:?}", name, e))?;
if vec.len() != 4 {
Err(format!("{} must be exactly 4 bytes", name))
} else {
let mut arr = [0; 4];
arr.copy_from_slice(&vec);
Ok(arr)
}
} else {
Err(format!("Unable to parse {}, must have 0x prefix", name))
}
})
.transpose()
}
| {
matches
.value_of(name)
.map(|dir| {
dir.parse::<PathBuf>()
.map_err(|e| format!("Unable to parse {}: {}", name, e))
})
.unwrap_or_else(|| {
dirs::home_dir()
.map(|home| home.join(default))
.ok_or_else(|| format!("Unable to locate home directory. Try specifying {}", name))
})
} |
hackeme_base_parser.py | from komparse import Parser, Grammar, Sequence, OneOf, \
Optional, OneOrMore, Many
class _Grammar(Grammar):
def __init__(self):
Grammar.__init__(self, case_sensitive=True)
self._init_tokens()
self._init_rules()
def _init_tokens(self):
self.add_comment(';', '\n')
self.add_comment('#|', '|#')
self.add_string('"', '"', '\\', 'STRING')
self.add_token('LIST_BEGIN', r"'\(")  # raw strings avoid invalid-escape warnings in the regex patterns
self.add_token('LPAR', r'\(')
self.add_token('RPAR', r'\)')
self.add_token('LSQBR', r'\[')
self.add_token('RSQBR', r'\]')
self.add_token('PLUS', r'\+')
self.add_token('MINUS', r'\-')
self.add_token('MULT', r'\*')
self.add_token('DIV', '/')
self.add_token('MOD', '%')
self.add_token('EQ', '=')
self.add_token('NE', '<>')
self.add_token('GT', '>')
self.add_token('GE', '>=')
self.add_token('LT', '<')
self.add_token('LE', '<=')
self.add_token('IDENT', '[a-z][a-z0-9]*(-[a-z0-9]+)*[!?]?')
self.add_token('VARARG', r'[a-z][a-z0-9]*(-[a-z0-9]+)*\*')
self.add_token('NUMBER', r'\d+')
self.add_token('BOOLEAN', '#t(rue)?|#f(alse)?')
self.add_keyword('define')
self.add_keyword('if')
self.add_keyword('cond')
def _init_rules(self):
self.rule('start', Many(self._oneof_1()), is_root=True)
self.rule('definition', self._oneof_2())
self.rule('vardef', self._seq_1())
self.rule('fundef', self._seq_2())
self.rule('expr', self._oneof_3())
self.rule('no_list', self._oneof_4())
self.rule('if_expr', self._seq_3())
self.rule('cond_expr', self._seq_4())
self.rule('cond_branch', self._seq_5())
self.rule('call', self._seq_6())
self.rule('operator', self._oneof_6())
self.rule('boolean', self.BOOLEAN())
self.rule('list', self._seq_7())
self.rule('list_item', self._oneof_7())
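# Illustrative input these rules accept (an assumption based on the grammar above,
# not taken from any docs): a recursive function definition that exercises fundef,
# if_expr, calls and operators:
#   (define (fact n) (if (= n 0) 1 (* n (fact (- n 1)))))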
def _seq_1(self):
return Sequence(
self.LPAR(),
self.DEFINE(),
self.IDENT('name'),
self.expr('value'),
self.RPAR())
def _seq_2(self):
return Sequence(
self.LPAR(),
self.DEFINE(),
self.LPAR(),
self.IDENT('name'),
Many(self.IDENT('param')),
Optional(self.VARARG('vararg')),
self.RPAR(),
Many(self.definition('localdef')),
OneOrMore(self.expr('body')),
self.RPAR())
def _seq_3(self):
|
def _seq_4(self):
return Sequence(
self.LPAR(),
self.COND(),
OneOrMore(self.cond_branch('branch')),
self.RPAR())
def _seq_5(self):
return Sequence(
self.LSQBR(),
self.expr('test'),
self.expr('consequent'),
self.RSQBR())
def _seq_6(self):
return Sequence(
self.LPAR(),
self._oneof_5(),
Many(self.expr('arg')),
self.RPAR())
def _seq_7(self):
return Sequence(
self.LIST_BEGIN(),
OneOrMore(self.list_item('li')),
self.RPAR())
def _seq_8(self):
return Sequence(
self.LPAR(),
OneOrMore(self.list_item('li')),
self.RPAR())
def _oneof_1(self):
return OneOf(
self.definition(),
self.expr())
def _oneof_2(self):
return OneOf(
self.vardef(),
self.fundef())
def _oneof_3(self):
return OneOf(
self.no_list(),
self.list())
def _oneof_4(self):
return OneOf(
self.if_expr(),
self.cond_expr(),
self.call(),
self.IDENT(),
self.NUMBER(),
self.boolean(),
self.STRING())
def _oneof_5(self):
return OneOf(
self.IDENT('callee'),
self.call('callee'),
self.operator('callee'))
def _oneof_6(self):
return OneOf(
self.PLUS(),
self.MINUS(),
self.MULT(),
self.DIV(),
self.MOD(),
self.EQ(),
self.NE(),
self.GT(),
self.GE(),
self.LT(),
self.LE())
def _oneof_7(self):
return OneOf(
self._seq_8(),
self.no_list('single'))
class HackemeBaseParser(Parser):
def __init__(self):
Parser.__init__(self, _Grammar())
| return Sequence(
self.LPAR(),
self.IF(),
self.expr('test'),
self.expr('consequent'),
self.expr('alternate'),
self.RPAR()) |
other_config.py | from dataclasses import dataclass, field
from typing import List
import tensorflow as tf
from graph_networks.utilities import *
import logging
import os
ATOM_FEATURE_DIM = DGIN4_ATOM_FEATURE_DIM
EDGE_FEATURE_DIM = DGIN4_EDGE_FEATURE_DIM
@dataclass
class BasicModelConfig:
"""
Config for model1/2/3 run file.
General model parameters
"""
model_name: str = 'only_logs_dmpnn4_1' # without h_w in DGIN gin part - added h_v_0 instead
# whole train/eval split - no more double split within train data set
# random train/test split in get_data_sd - only change overall_seed
# CHANGES dgin3 10.02.2021:
# *added new bondFeaturesDGIN2 and atomFeaturesDGIN2; DGIN2_ATOM_FEATURE_DIM; DGIN2_EDGE_FEATURE_DIM
# *from project_path+'data/processed/lipo/pickled/train_frags3/' to project_path+'data/processed/lipo/pickled/test_frags3/'
# CHANGES dgin3 16.02.2021:
# *added new bondFeaturesDGIN3 and atomFeaturesDGIN3; DGIN3_ATOM_FEATURE_DIM; DGIN3_EDGE_FEATURE_DIM
# *from project_path+'data/processed/lipo/pickled/train_frags_dgin3/' to project_path+'data/processed/lipo/pickled/test_frags_dgin3/'
# CHANGES dgin4 16.02.2021:
# *added add_species bool in model1 config - previously not there; for dgin2 featurization adds the species type after the dgin
# encoding before logD prediction
# test_frags_dgin4 was added for species inclusion in model2 call()
batch_size: int =15
override_if_exists: bool = True
overall_seed: int = 2
# path to the project folder
project_path:str = "./"
retrain_model: bool = False
retrain_model_name: str = ''
retrain_model_epoch: str = ''
retrain_model_weights_dir: str = project_path+'reports/model_weights/'+retrain_model_name+'/epoch_'+retrain_model_epoch+'/checkp_'+retrain_model_epoch
train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin4_logs/'
test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin4_logs/'
combined_dataset: bool = False
add_train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin4_logs/'
add_test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin4_logs/'
test_model: bool = False
test_model_epoch: str = '887'
# define the number or test runs for the CI.
# the mean and std of the RMSE and r^2 of the combined runs are taken as the output.
test_n_times: int = 1
# do you want to test the model with consensus mode?
# if yes, a defined ML model will be included in the consensus predictions during the testing.
consensus: bool = False
# include dropout during testing?
include_dropout: bool = False
test_model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/epoch_'+test_model_epoch+'/checkp_'+test_model_epoch
# To save the prediction values for each property set to True
# When this flag is True - the whole test dataset is taken and test_n_times is set to zero!
save_predictions: bool = False
# define the folder where you want to save the predictions.
# For each property, a file is created under the property name ("./logd.txt","./logs.txt","./logp.txt","./others.txt")
test_prediction_output_folder: str = project_path+"reports/predictions/"+model_name+"/"
encode_hidden: bool = False
log_dir: str = project_path+'reports/logs/'+model_name+'.log'
verbosity_level = logging.INFO
model_type: str = 'DMPNN' # added 31.03.2021 to compare models like 'GIN' 'DMPNN' 'DGIN' 'MLP'
plot_dir: str = project_path+'reports/figures/'+model_name+'/'
tensorboard_log_dir: str = project_path+'reports/tensorboard/'+model_name+'/'
config_log_dir: str = project_path+'reports/configs/'+model_name+'/'
model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/'
stats_log_dir: str = project_path+'reports/stats/'+model_name+'/'
@dataclass
class DGINConfig:
"""
Config for the directed-mpnn class.
"""
dropout_aggregate_dmpnn: bool = False
layernorm_aggregate_dmpnn: bool = True
dropout_passing_dmpnn: bool = False
layernorm_passing_dmpnn: bool = True
dropout_aggregate_gin: bool = False
layernorm_aggregate_gin: bool = True
dropout_passing_gin: bool = False
layernorm_passing_gin: bool = True
gin_aggregate_bias: bool = False
dmpnn_passing_bias: bool = False
init_bias: bool = False
message_iterations_dmpnn: int = 4 | message_iterations_gin: int = 4
dropout_rate: float = 0.15
input_size: int = (ATOM_FEATURE_DIM+EDGE_FEATURE_DIM) # combination of node feature len (33) and edge feature len (12)
passing_hidden_size: int = 56 # this can be changed
input_size_gin: int = (ATOM_FEATURE_DIM) # changed 31.03.2021
return_hv: bool = True # model3 parameter
@dataclass
class Model1Config:
"""
Config model1 class - no subclass configs are defined here.
"""
validation_split: float = 0.90
learning_rate: float = 0.004
clip_rate: float = 0.6
optimizer = tf.keras.optimizers.Adam(learning_rate)
lipo_loss_mse = tf.keras.losses.mse
lipo_loss_mae = tf.keras.losses.mae
logP_loss_mse = tf.keras.losses.mse
logS_loss_mse = tf.keras.losses.mse
other_loss_mse = tf.keras.losses.mse
mw_loss_mse = tf.keras.losses.mse
metric = tf.keras.losses.mae
epochs: int = 1600
# define the number of epochs for each test run.
save_after_epoch: int = 3
# dropout rate for the general model - mainly the MLP for the different log predictions
dropout_rate: float = 0.15 # the overall dropout rate of the readout functions
# the seed to shuffle the training/validation dataset; for the same dataset, even when
# combined_dataset is True, the same training/validation instances are produced
train_data_seed: int = 0
hidden_readout_1: int = 32
hidden_readout_2: int = 14
activation_func_readout = tf.nn.relu
include_logD: bool = False
include_logS: bool = True
include_logP: bool = False
include_other: bool = False
include_mw: bool = False
include_rot_bond: bool = False
include_HBA: bool = False
include_HBD: bool = False
# define the starting threshold for the RMSE of the model. When the combined RMSE
# is below this threshold, the model weights are saved and a new threshold
# is set. It only serves as a starting threshold so that not too many models
# are saved. Depends on how many log endpoints are taken into
# consideration - three endpoints have a higher combined RMSE than only one
# endpoint.
best_evaluation_threshold: float = 2.45 #was introduced on the 25.03.2021/
# define the individual thresholds. If one model is better, the corresponding
# model weights are being saved.
best_evaluation_threshold_logd: float = 1.85
best_evaluation_threshold_logp: float = 1.65
best_evaluation_threshold_logs: float = 2.15
best_evaluation_threshold_other: float = 2.15
# 2.45 for all_logs
# 0.70 logP
# 0.75 logD
# 1.00 logS
# 1.75 logSD
# 1.70 logSP
# 1.45 logDP
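# (illustrative, following the description above: with a starting threshold of 2.45,
# a combined RMSE of e.g. 2.10 triggers a checkpoint save and 2.10 becomes the new threshold)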
include_fragment_conv: bool = False # was introduced on the 4.12.2020
use_rmse: bool = True # uses RMSE instead of MSE for only lipo_loss
shuffle_inside: bool = True # reshuffles the train/valid split in each epoch (improves generalization)
add_species: bool = False # 16.02 introduction; previously not there; for dgin3 adds the species type after the dgin encoding before logD prediction
@dataclass
class FrACConfig:
"""
Config fragment aggregation class - no subclass configs are defined here.
"""
input_size_gin: int = 28
layernorm_aggregate: bool = True
reduce_mean: bool = True # when false -> reduce_sum
@dataclass
class MLConfig:
"""
Configs for the ML algorithm
"""
# which algorithm do you want to use for the consensus?
# possibilities are: "SVM", "RF", "KNN" or "LR" - all are regression models!
# SVM: Support Vector Machine; RF: Random Forest, KNN: K-Nearest Neighbors; LR: Linear Regression;
algorithm: str = "SVM"
# which fingerprint to use - possibilities are: "ECFP" or "MACCS"
fp_types: str = "ECFP"
# If 'ECFP' fingerprint is used, define the number of bits - maximum is 2048!
n_bits: int = 2048
# If "ECFP" fingerprint is used, define the radius
radius: int = 4
# define if descriptors should be included into the non-GNN molecular representation
include_descriptors: bool = True
# define if the descriptors should be standardized by scaling and centering (Sklearn)
standardize: bool = True
@dataclass
class Config():
"""
Overall config class for model2 and run file.
Includes all submodels config
"""
basic_model_config: BasicModelConfig
model1_config: Model1Config
d_gin_config: DGINConfig
frag_acc_config: FrACConfig
ml_config: MLConfig
model: str = 'model11' | |
697.js | "use strict";(self.webpackChunk=self.webpackChunk||[]).push([[697],{5515:(t,e,s)=>{s.d(e,{Z:()=>a});var l=s(3645),i=s.n(l)()((function(t){return t[1]}));i.push([t.id,".p-inputtextarea-resizable{overflow:hidden;resize:none}.p-fluid .p-inputtextarea{width:100%}",""]);const a=i},5693:(t,e,s)=>{t.exports=s(4007)},8407:(t,e,s)=>{t.exports=s(3097)},4007:(t,e,s)=>{s.d(e,{default:()=>a});var l=s(1322);const i={inheritAttrs:!1,props:{value:null,modelValue:null,binary:Boolean},model:{prop:"modelValue",event:"input"},data:()=>({focused:!1}),methods:{onClick(t){if(!this.$attrs.disabled){let e;e=this.binary?!this.modelValue:this.checked?this.modelValue.filter((t=>!l.default.equals(t,this.value))):this.modelValue?[...this.modelValue,this.value]:[this.value],this.$emit("click",t),this.$emit("input",e),this.$emit("change",t),this.$refs.input.focus()}},onFocus(t){this.focused=!0,this.$emit("focus",t)},onBlur(t){this.focused=!1,this.$emit("blur",t)}},computed:{checked(){return this.binary?this.modelValue:l.default.contains(this.value,this.modelValue)},containerClass(){return["p-checkbox p-component",{"p-checkbox-checked":this.checked,"p-checkbox-disabled":this.$attrs.disabled,"p-checkbox-focused":this.focused}]}}};const a=(0,s(1900).Z)(i,(function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{class:t.containerClass,on:{click:function(e){return t.onClick(e)}}},[s("div",{staticClass:"p-hidden-accessible"},[s("input",t._b({ref:"input",attrs:{type:"checkbox"},domProps:{checked:t.checked,value:t.value},on:{focus:function(e){return t.onFocus(e)},blur:function(e){return t.onBlur(e)}}},"input",t.$attrs,!1))]),t._v(" "),s("div",{ref:"box",class:["p-checkbox-box",{"p-highlight":t.checked,"p-disabled":t.$attrs.disabled,"p-focus":t.focused}],attrs:{role:"checkbox","aria-checked":t.checked}},[s("span",{class:["p-checkbox-icon",{"pi pi-check":t.checked}]})])])}),[],!1,null,null,null).exports},3097:(t,e,s)=>{s.d(e,{default:()=>c});const l={props:{value:null,autoResize:Boolean},mounted(){this.$el.offsetParent&&this.autoResize&&this.resize()},updated(){this.$el.offsetParent&&this.autoResize&&this.resize()},methods:{resize(){const t=window.getComputedStyle(this.$el);this.$el.style.height="auto",this.$el.style.height=`calc(${t.borderTopWidth} + ${t.borderBottomWidth} + ${this.$el.scrollHeight}px)`,parseFloat(this.$el.style.height)>=parseFloat(this.$el.style.maxHeight)?(this.$el.style.overflow="scroll",this.$el.style.height=this.$el.style.maxHeight):this.$el.style.overflow="hidden"}},computed:{listeners(){return{...this.$listeners,input:t=>{this.autoResize&&this.resize(),this.$emit("input",t.target.value)}}},filled(){return null!=this.value&&this.value.toString().length>0}}};var i=s(3379),a=s.n(i),o=s(5515),n={insert:"head",singleton:!1};a()(o.Z,n);o.Z.locals;const c=(0,s(1900).Z)(l,(function(){var t=this,e=t.$createElement;return(t._self._c||e)("textarea",t._g({class:["p-inputtextarea p-inputtext p-component",{"p-filled":t.filled,"p-inputtextarea-resizable ":t.autoResize}],domProps:{value:t.value}},t.listeners))}),[],!1,null,null,null).exports},697:(t,e,s)=>{s.r(e),s.d(e,{default:()=>o});var l=s(5693),i=s(8407);const a={name:"CreateCompany",components:{Checkbox:l.default,Textarea:i.default},data:function(){return{description:"",status:!0,home:{icon:"pi pi-home",to:"/"},items:[{label:"CRM"},{label:"Customer Group"},{label:"List Company",to:"/list-company"},{label:"Create Company"}]}}};const o=(0,s(1900).Z)(a,(function(){var t=this,e=t.$createElement,s=t._self._c||e;return 
s("div",[s("div",{staticClass:"p-mb-4"},[s("Breadcrumb",{attrs:{home:t.home,model:t.items}})],1),t._v(" "),s("div",{staticClass:"p-d-flex p-jc-between p-mb-4"},[s("h2",[t._v("Create Company")]),t._v(" "),s("button",{staticClass:"btn btn-success"},[s("router-link",{staticClass:"text-white",attrs:{to:"/list-company"}},[t._v("List Company")])],1)]),t._v(" "),s("Card",{scopedSlots:t._u([{key:"content",fn:function(){return[s("div",{staticClass:"p-fluid p-col-6 p-m-auto"},[s("div",{staticClass:"p-field p-col-12 p-mb-4"},[s("span",{staticClass:"p-float-label"},[s("InputText",{attrs:{id:"title",type:"text"}}),t._v(" "),s("label",{attrs:{for:"title"}},[t._v("Title")])],1)]),t._v(" "),s("div",{staticClass:"p-field p-col-12 p-mb-4"},[s("span",{staticClass:"p-float-label"},[s("Textarea",{attrs:{autoResize:!0,rows:"5"},model:{value:t.description,callback:function(e){t.description=e},expression:"description"}}),t._v(" "),s("label",{attrs:{for:"lastname"}},[t._v("Description")])],1)]),t._v(" "),s("div",{staticClass:"p-field p-grid p-ai-center p-col-12"},[s("div",{staticClass:"p-d-flex p-col-12 p-ai-start"},[s("label",{staticClass:"p-mr-4",attrs:{for:"status"}},[t._v("Status")]),t._v(" "),s("Checkbox",{attrs:{binary:!0},model:{value:t.status,callback:function(e){t.status=e},expression:"status"}}),t._v(" "),t.status?s("span",{staticClass:"p-ml-2"},[t._v("Active")]):s("span",{staticClass:"p-ml-2"},[t._v("Inactive ")])],1)]),t._v(" "),s("div",{staticClass:"p-field p-col-12 p-md-12"},[s("button",{staticClass:"form-control btn btn-success",attrs:{label:"Submit"}},[t._v("\r\n Create\r\n ")])])])]},proxy:!0}])})],1)}),[],!1,null,null,null).exports}}]); |
||
build.config.js | const config = {
...require('../../build.config'),
__SERVER__: false,
__CLIENT__: true,
__SSR__: false,
__TEST__: false, | module.exports = config; | __API_URL__: process.env.API_URL || 'http://localhost:8080/graphql',
__WEBSITE_URL__: process.env.WEBSITE_URL || 'http://localhost:8080'
};
|
template_cddl.py | # coding: utf-8
import pprint
import re
import six
class TemplateCddl:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'flow': 'FlowItem',
'states': 'dict(str, TemplateState)',
'workflow': 'Workflow'
}
attribute_map = {
'flow': 'flow',
'states': 'states',
'workflow': 'workflow'
}
def __init__(self, flow=None, states=None, workflow=None):
"""TemplateCddl - a model defined in huaweicloud sdk"""
self._flow = None
self._states = None
self._workflow = None
self.discriminator = None
self.flow = flow
self.states = states
self.workflow = workflow
@property
def flow(self):
"""Gets the flow of this TemplateCddl.
:return: The flow of this TemplateCddl.
:rtype: FlowItem
"""
return self._flow
@flow.setter
def flow(self, flow):
"""Sets the flow of this TemplateCddl.
:param flow: The flow of this TemplateCddl.
:type: FlowItem
"""
self._flow = flow
@property
def states(self):
"""Gets the states of this TemplateCddl.
Sub-task states, a map-type value
:return: The states of this TemplateCddl.
:rtype: dict(str, TemplateState)
"""
return self._states
@states.setter
def states(self, states):
"""Sets the states of this TemplateCddl.
Sub-task states, a map-type value
:param states: The states of this TemplateCddl.
:type: dict(str, TemplateState)
"""
self._states = states
@property
def workflow(self):
"""Gets the workflow of this TemplateCddl.
:return: The workflow of this TemplateCddl.
:rtype: Workflow
"""
return self._workflow
@workflow.setter
def workflow(self, workflow):
"""Sets the workflow of this TemplateCddl.
:param workflow: The workflow of this TemplateCddl.
:type: Workflow
"""
self._workflow = workflow
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
| to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TemplateCddl):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| result[attr] = "****"
else:
result[attr] = value
return result
def |
Polyfills.ts | // Polyfills.ts - Some Polyfills for old browsers, e.g. IE8, and nodeJS
//
/* globals globalThis */
import { Utils } from "./Utils";
export var Polyfills = {
iCount: 0
// empty
};
// IE: window.console is only available when Dev Tools are open
if (!Utils.console) {
const oUtilsConsole: any = {
cpcBasicLog: "LOG:\n",
log: function () { // varargs
if (oUtilsConsole.cpcBasicLog) {
oUtilsConsole.cpcBasicLog += Array.prototype.slice.call(arguments).join(" ") + "\n";
}
}
}; | oUtilsConsole.info = oUtilsConsole.log;
oUtilsConsole.warn = oUtilsConsole.log;
oUtilsConsole.error = oUtilsConsole.log;
oUtilsConsole.debug = oUtilsConsole.log;
(Utils.console as any) = oUtilsConsole;
}
if (!Utils.console.debug) { // IE8 has no console.debug
Utils.console.debug = Utils.console.log;
Utils.console.debug("Polyfill: window.console.debug");
}
if ((typeof globalThis !== "undefined") && !globalThis.window) { // nodeJS
Utils.console.debug("Polyfill: window");
(globalThis.window as any) = {};
}
if (!Array.prototype.indexOf) { // IE8
Array.prototype.indexOf = function (searchElement, iFrom?: number) { // eslint-disable-line no-extend-native
const iLen = this.length >>> 0; // eslint-disable-line no-bitwise
iFrom = Number(iFrom) || 0;
iFrom = (iFrom < 0) ? Math.ceil(iFrom) : Math.floor(iFrom);
if (iFrom < 0) {
iFrom += iLen;
}
for (; iFrom < iLen; iFrom += 1) {
if (iFrom in this && this[iFrom] === searchElement) {
return iFrom;
}
}
return -1;
};
}
if (!Array.prototype.map) { // IE8
// based on: https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Array/map
Utils.console.debug("Polyfill: Array.prototype.map");
Array.prototype.map = function (callback, thisArg) { // eslint-disable-line no-extend-native,func-names
const aValues = [],
oObject = Object(this),
len = oObject.length;
let T;
if (arguments.length > 1) {
T = thisArg;
}
for (let i = 0; i < len; i += 1) {
if (i in oObject) {
const kValue = oObject[i],
mappedValue = callback.call(T, kValue, i, oObject);
aValues[i] = mappedValue;
}
}
return aValues;
};
}
if (window.Element) {
if (!Element.prototype.addEventListener) { // IE8
Utils.console.debug("Polyfill: Element.prototype.addEventListener");
Element.prototype.addEventListener = function (sEvent: string, fnCallback: (e: Event) => void) {
sEvent = "on" + sEvent;
return (this as any).attachEvent(sEvent, fnCallback);
};
}
if (!Element.prototype.removeEventListener) { // IE8
Utils.console.debug("Polyfill: Element.prototype.removeEventListener");
Element.prototype.removeEventListener = function (sEvent: string, fnCallback: (e: Event) => void) {
sEvent = "on" + sEvent;
return (this as any).detachEvent(sEvent, fnCallback);
};
}
}
if (window.Event) {
if (!Event.prototype.preventDefault) { // IE8
Utils.console.debug("Polyfill: Event.prototype.preventDefault");
Event.prototype.preventDefault = function () {
// empty
};
}
if (!Event.prototype.stopPropagation) { // IE8
Utils.console.debug("Polyfill: Event.prototype.stopPropagation");
Event.prototype.stopPropagation = function () {
// empty
};
}
}
if (!Date.now) { // IE8
Utils.console.debug("Polyfill: Date.now");
Date.now = function () {
return new Date().getTime();
};
}
if (window.document) {
if (!document.addEventListener) {
// or check: https://gist.github.com/fuzzyfox/6762206
Utils.console.debug("Polyfill: document.addEventListener, removeEventListener");
if ((document as any).attachEvent) {
(function () {
type EventListenerEntry = {
object: Document,
sEvent: string,
fnHandler: (e: Event) => void,
fnOnEvent: (e: Event) => boolean
};
const aEventListeners: EventListenerEntry[] = [];
document.addEventListener = function (sEvent: string, fnHandler: (e: Event) => void) {
const fnFindCaret = function (event: Event) {
const documentSelection = (document as any).selection; // IE only
if (documentSelection) {
const eventTarget = event.target as HTMLTextAreaElement;
eventTarget.focus();
const oRange = documentSelection.createRange(),
oRange2 = oRange.duplicate();
if (oRange2.moveToElementText) { // not on IE8
oRange2.moveToElementText(event.target);
}
oRange2.setEndPoint("EndToEnd", oRange);
eventTarget.selectionStart = oRange2.text.length - oRange.text.length;
eventTarget.selectionEnd = eventTarget.selectionStart + oRange.text.length;
}
},
fnOnEvent = function (event: Event) {
event = event || window.event;
const eventTarget = event.target || event.srcElement;
if (event.type === "click" && eventTarget && (eventTarget as HTMLTextAreaElement).tagName === "TEXTAREA") {
fnFindCaret(event);
}
fnHandler(event);
return false;
};
// On old IE8 the change event does not bubble up to document, so attach it to every select tag
if (sEvent === "change") {
const aElements = document.getElementsByTagName("select");
for (let i = 0; i < aElements.length; i += 1) {
(aElements[i] as any).attachEvent("on" + sEvent, fnOnEvent);
aEventListeners.push({ //TTT does this work?
object: this,
sEvent: sEvent,
fnHandler: fnHandler,
fnOnEvent: fnOnEvent
});
}
} else { // e.g. "Click"
(document as any).attachEvent("on" + sEvent, fnOnEvent);
aEventListeners.push({
object: this,
sEvent: sEvent,
fnHandler: fnHandler,
fnOnEvent: fnOnEvent
});
}
};
document.removeEventListener = function (sEvent: string, fnHandler: (e: Event) => void) {
let counter = 0;
while (counter < aEventListeners.length) {
const oEventListener = aEventListeners[counter];
if (oEventListener.object === this && oEventListener.sEvent === sEvent && oEventListener.fnHandler === fnHandler) {
(this as any).detachEvent("on" + sEvent, oEventListener.fnOnEvent);
aEventListeners.splice(counter, 1);
break;
}
counter += 1;
}
};
}());
} else {
Utils.console.log("No document.attachEvent found."); // will be ignored
// debug: trying to fix
if ((document as any).__proto__.addEventListener) { // eslint-disable-line no-proto
document.addEventListener = (document as any).__proto__.addEventListener; // eslint-disable-line no-proto
}
}
}
}
if (!Function.prototype.bind) { // IE8
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/bind
// Does not work with `new funcA.bind(thisArg, args)`
Utils.console.debug("Polyfill: Function.prototype.bind");
(function () {
const ArrayPrototypeSlice = Array.prototype.slice; // since IE6
Function.prototype.bind = function () { // eslint-disable-line no-extend-native
const that = this, // eslint-disable-line @typescript-eslint/no-this-alias
thatArg = arguments[0],
args = ArrayPrototypeSlice.call(arguments, 1),
argLen = args.length;
if (typeof that !== "function") {
// closest thing possible to the ECMAScript 5 internal IsCallable function
throw new TypeError("Function.prototype.bind - what is trying to be bound is not callable");
}
return function () {
args.length = argLen;
args.push.apply(args, arguments as any);
return that.apply(thatArg, args);
};
};
}());
}
if (!Math.sign) { // IE11
Utils.console.debug("Polyfill: Math.sign");
Math.sign = function (x) {
return (Number(x > 0) - Number(x < 0)) || Number(x);
};
}
if (!Math.trunc) { // IE11
Utils.console.debug("Polyfill: Math.trunc");
Math.trunc = function (v) {
return v < 0 ? Math.ceil(v) : Math.floor(v);
};
}
if (!Object.assign) { // IE11
Utils.console.debug("Polyfill: Object.assign");
Object.assign = function (oTarget: Record<string, unknown>) { // varargs // Object.assign is ES6, not in IE
const oTo = oTarget;
for (let i = 1; i < arguments.length; i += 1) {
const oNextSource = arguments[i];
for (const sNextKey in oNextSource) {
if (oNextSource.hasOwnProperty(sNextKey)) {
oTo[sNextKey] = oNextSource[sNextKey];
}
}
}
return oTo;
};
}
if (!Object.keys) { // IE8
Utils.console.debug("Polyfill: Object.keys");
// https://tokenposts.blogspot.com/2012/04/javascript-objectkeys-browser.html
Object.keys = function (o: object): string[] { // eslint-disable-line @typescript-eslint/ban-types
const k: string[] = [];
if (o !== Object(o)) {
throw new TypeError("Object.keys called on a non-object");
}
for (const p in o) {
if (Object.prototype.hasOwnProperty.call(o, p)) {
k.push(p);
}
}
return k;
};
}
if (!String.prototype.endsWith) {
Utils.console.debug("Polyfill: String.prototype.endsWith");
String.prototype.endsWith = function (sSearch: string, iPosition?: number) { // eslint-disable-line no-extend-native
if (iPosition === undefined) {
iPosition = this.length;
}
iPosition -= sSearch.length;
const iLastIndex = this.indexOf(sSearch, iPosition);
return iLastIndex !== -1 && iLastIndex === iPosition;
};
}
if (!String.prototype.includes) { // IE11
Utils.console.debug("Polyfill: String.prototype.includes");
String.prototype.includes = function (sSearch: string, iStart = 0) { // eslint-disable-line no-extend-native
let bRet: boolean;
if (iStart + sSearch.length > this.length) {
bRet = false;
} else {
bRet = this.indexOf(sSearch, iStart) !== -1;
}
return bRet;
};
}
if (!String.prototype.padStart) { // IE11
Utils.console.debug("Polyfill: String.prototype.padStart");
String.prototype.padStart = function (iTargetLength: number, sPad?: string) { // eslint-disable-line no-extend-native
let sRet = String(this);
iTargetLength >>= 0; // eslint-disable-line no-bitwise
if (this.length < iTargetLength) {
sPad = String(typeof sPad !== "undefined" ? sPad : " ");
iTargetLength -= this.length;
if (iTargetLength > sPad.length) {
sPad += sPad.repeat(iTargetLength / sPad.length);
}
sRet = sPad.slice(0, iTargetLength) + sRet;
}
return sRet;
};
}
if (!String.prototype.padEnd) { // IE11
// based on: https://github.com/behnammodi/polyfill/blob/master/string.polyfill.js
Utils.console.debug("Polyfill: String.prototype.padEnd");
String.prototype.padEnd = function (iTargetLength: number, sPad?: string) { // eslint-disable-line no-extend-native
let sRet = String(this);
iTargetLength >>= 0; // eslint-disable-line no-bitwise
if (this.length < iTargetLength) {
sPad = String(typeof sPad !== "undefined" ? sPad : " ");
iTargetLength -= this.length;
if (iTargetLength > sPad.length) {
sPad += sPad.repeat(iTargetLength / sPad.length);
}
sRet += sPad.slice(0, iTargetLength); // this line differs from padStart
}
return sRet;
};
}
if (!String.prototype.repeat) { // IE11
Utils.console.debug("Polyfill: String.prototype.repeat");
String.prototype.repeat = function (iCount: number) { // eslint-disable-line no-extend-native
const sStr = String(this);
let sOut = "";
for (let i = 0; i < iCount; i += 1) {
sOut += sStr;
}
return sOut;
};
}
if (!String.prototype.startsWith) {
Utils.console.debug("Polyfill: String.prototype.startsWith");
String.prototype.startsWith = function (sSearch, iPosition) { // eslint-disable-line no-extend-native
iPosition = iPosition || 0;
return this.indexOf(sSearch, iPosition) === iPosition;
};
}
if (!String.prototype.trim) { // IE8
Utils.console.debug("Polyfill: String.prototype.trim");
String.prototype.trim = function () { // eslint-disable-line no-extend-native
return this.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, "");
};
}
// based on: https://github.com/mathiasbynens/base64/blob/master/base64.js
// https://mths.be/base64 v0.1.0 by @mathias | MIT license
if (!Utils.atob) { // IE9 (and node.js)
Utils.console.debug("Polyfill: window.atob, btoa");
(function () {
const sTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
reSpaceCharacters = /[\t\n\f\r ]/g; // http://whatwg.org/html/common-microsyntaxes.html#space-character
/* eslint-disable no-bitwise */
Utils.atob = function (sInput: string) { // decode
sInput = String(sInput).replace(reSpaceCharacters, "");
let length = sInput.length;
if (length % 4 === 0) {
sInput = sInput.replace(/[=]=?$/, ""); // additional brackets to make eslint happy
length = sInput.length;
}
if (length % 4 === 1 || (/[^+a-zA-Z0-9/]/).test(sInput)) { // http://whatwg.org/C#alphanumeric-ascii-characters
throw new TypeError("Polyfills:atob: Invalid character: the string to be decoded is not correctly encoded.");
}
let bitCounter = 0,
output = "",
position = 0,
bitStorage = 0;
while (position < length) {
const buffer = sTable.indexOf(sInput.charAt(position));
bitStorage = bitCounter % 4 ? bitStorage * 64 + buffer : buffer;
bitCounter += 1;
if ((bitCounter - 1) % 4) { // Unless this is the first of a group of 4 characters...
output += String.fromCharCode(0xFF & bitStorage >> (-2 * bitCounter & 6)); // ...convert the first 8 bits to a single ASCII character
}
position += 1;
}
return output;
};
Utils.btoa = function (input: string) { // encode
input = String(input);
if ((/[^\0-\xFF]/).test(input)) {
throw new TypeError("Polyfills:btoa: The string to be encoded contains characters outside of the Latin1 range.");
}
const padding = input.length % 3,
length = input.length - padding; // Make sure any padding is handled outside of the loop
let output = "",
position = 0;
while (position < length) {
// Read three bytes, i.e. 24 bits.
const a = input.charCodeAt(position) << 16;
position += 1;
const b = input.charCodeAt(position) << 8;
position += 1;
const c = input.charCodeAt(position);
position += 1;
const buffer = a + b + c;
// Turn the 24 bits into four chunks of 6 bits each, and append the matching character for each of them to the output
output += sTable.charAt(buffer >> 18 & 0x3F) + sTable.charAt(buffer >> 12 & 0x3F) + sTable.charAt(buffer >> 6 & 0x3F) + sTable.charAt(buffer & 0x3F);
}
if (padding === 2) {
const a = input.charCodeAt(position) << 8;
position += 1;
const b = input.charCodeAt(position),
buffer = a + b;
output += sTable.charAt(buffer >> 10) + sTable.charAt((buffer >> 4) & 0x3F) + sTable.charAt((buffer << 2) & 0x3F) + "=";
} else if (padding === 1) {
const buffer = input.charCodeAt(position);
output += sTable.charAt(buffer >> 2) + sTable.charAt((buffer << 4) & 0x3F) + "==";
}
return output;
};
/* eslint-enable no-bitwise */
}());
}
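// Sanity check for the two polyfills above (plain base64, nothing browser-specific):
// Utils.btoa("abc") returns "YWJj" and Utils.atob("YWJj") returns "abc".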
// For IE and Edge, localStorage is only available if page is hosted on web server, so we simulate it (do not use property "length" or method names as keys!)
if (!Utils.localStorage) {
Utils.console.debug("Polyfill: window.localStorage");
(function () {
class Storage {
length = 0;
clear() {
for (const key in this) {
if (this.hasOwnProperty(key)) {
delete this[key];
}
}
this.length = 0;
}
key(index: number) {
let i = 0;
for (const key in this) {
if (this.hasOwnProperty(key) && key !== "length") {
if (i === index) {
return key;
}
i += 1;
}
}
return null;
}
getItem(sKey: string) {
return this.hasOwnProperty(sKey) ? (this as any)[sKey] : null;
}
setItem(sKey: string, value: string) {
if (this.getItem(sKey) === null) {
this.length += 1;
}
(this as any)[sKey] = String(value);
}
removeItem(sKey: string) {
if (this.getItem(sKey) !== null) {
delete (this as any)[sKey];
this.length -= 1;
}
}
}
Utils.localStorage = new Storage();
}());
}
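// Illustrative use of the simulated storage (mirrors the real Web Storage API):
// Utils.localStorage.setItem("k", "v"); Utils.localStorage.getItem("k") === "v";
// Utils.localStorage.removeItem("k"); Utils.localStorage.length === 0.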
if (!window.ArrayBuffer) { // IE9
Utils.console.debug("Polyfill: window.ArrayBuffer");
window.ArrayBuffer = Array as any;
}
if (!window.AudioContext) { // ? not for IE
window.AudioContext = (window as any).webkitAudioContext || (window as any).mozAudioContext;
if (window.AudioContext) {
Utils.console.debug("Polyfill: window.AudioContext");
} else {
Utils.console.warn("Polyfill: window.AudioContext: not ok!");
}
}
if (!window.JSON) { // simple polyfill for JSON.parse only
// for a better implementation, see https://github.com/douglascrockford/JSON-js/blob/master/json2.js
Utils.console.debug("Polyfill: window.JSON.parse");
(window as any).JSON = {
parse: function (sText: string) {
const oJson = eval("(" + sText + ")"); // eslint-disable-line no-eval
return oJson;
},
stringify: function (o: Object) { // eslint-disable-line @typescript-eslint/ban-types
Utils.console.error("Not implemented: window.JSON.stringify");
return String(o);
}
};
}
if (!window.requestAnimationFrame) { // IE9, SliTaz tazweb browser
// https://wiki.selfhtml.org/wiki/JavaScript/Window/requestAnimationFrame
window.requestAnimationFrame = (window as any).mozRequestAnimationFrame || (window as any).webkitRequestAnimationFrame || (window as any).msRequestAnimationFrame;
window.cancelAnimationFrame = (window as any).mozCancelAnimationFrame || (window as any).webkitCancelAnimationFrame || (window as any).msCancelAnimationFrame;
if (!window.requestAnimationFrame || !window.cancelAnimationFrame) {
(function () {
let lastTime = 0;
Utils.console.debug("Polyfill: window.requestAnimationFrame, cancelAnimationFrame");
window.requestAnimationFrame = function (callback /* , element */) {
const currTime = new Date().getTime(),
timeToCall = Math.max(0, 16 - (currTime - lastTime)),
id = window.setTimeout(function () { callback(currTime + timeToCall); }, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
window.cancelAnimationFrame = function (id) {
clearTimeout(id);
};
}());
} else {
Utils.console.debug("Polyfill: window.requestAnimationFrame, cancelAnimationFrame: Using vendor specific method.");
}
}
if (!window.Uint8Array) { // IE9
Utils.console.debug("Polyfill: Uint8Array (fallback only)");
(window as any).Uint8Array = function (oArrayBuffer: ArrayBufferConstructor) {
return oArrayBuffer; // we just return the ArrayBuffer as fallback; enough for our needs
};
(window.Uint8Array as any).BYTES_PER_ELEMENT = 1;
// A more complex solution would be: https://github.com/inexorabletash/polyfill/blob/master/typedarray.js
}
Utils.console.debug("Polyfill: end of Polyfills");
// end | |
fmt-source-locations.rs | //! Demonstrates displaying events' source code locations with the `fmt`
//! subscriber.
#![deny(rust_2018_idioms)]
#[path = "fmt/yak_shave.rs"]
mod yak_shave;
fn main() | {
tracing_subscriber::fmt()
// enable everything
.with_max_level(tracing::Level::TRACE)
// display source code file paths
.with_file(true)
// display source code line numbers
.with_line_number(true)
// disable targets
.with_target(false)
// sets this to be the default, global collector for this application.
.init();
let number_of_yaks = 3;
// this creates a new event, outside of any spans.
tracing::info!(number_of_yaks, "preparing to shave yaks");
let number_shaved = yak_shave::shave_all(number_of_yaks);
tracing::info!(
all_yaks_shaved = number_shaved == number_of_yaks,
"yak shaving completed."
);
} |
|
dnssoarec_args.py | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class dnssoarec_args :
r""" Provides additional arguments required for fetching the dnssoarec resource.
"""
def __init__(self) :
self._type = None
self._nodeid = None
@property
def type(self) :
r"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY.
"""
try :
return self._type
except Exception as e:
raise e
@type.setter
def type(self, type) :
r"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY
"""
try :
self._type = type
except Exception as e:
raise e
@property
def nodeid(self) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31.
"""
try :
return self._nodeid
except Exception as e:
raise e
@nodeid.setter
def nodeid(self, nodeid) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31
"""
try :
self._nodeid = nodeid
except Exception as e:
raise e
class | :
ALL = "ALL"
ADNS = "ADNS"
PROXY = "PROXY"
| Type |
createApp.tsx | import * as React from "react";
import type { AppProps } from "next/app";
import { useMemo } from "react";
import { CreateAppProps } from "../defs";
import { XUIProvider } from "@bluelibs/x-ui-react-bundle";
export const createApp = (props: CreateAppProps) => {
const { loadingComponent, kernel: baseKernel } = props;
const App = ({ Component, pageProps }: AppProps) => {
const kernel = useMemo(() => baseKernel, []);
| </XUIProvider>
);
};
return App;
}; | return (
<XUIProvider {...{ kernel, loadingComponent }}>
<Component {...pageProps} /> |
manager.ts | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import * as inputs from "../../types/input";
import * as outputs from "../../types/output";
import * as utilities from "../../utilities";
/**
* The StorSimple Manager.
*/
export class Manager extends pulumi.CustomResource {
/**
* Get an existing Manager resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, opts?: pulumi.CustomResourceOptions): Manager {
return new Manager(name, undefined as any, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'azure-nextgen:storsimple/v20170601:Manager';
/**
* Returns true if the given object is an instance of Manager. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is Manager {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === Manager.__pulumiType;
}
/**
* Represents the type of StorSimple Manager.
*/
public readonly cisIntrinsicSettings!: pulumi.Output<outputs.storsimple.v20170601.ManagerIntrinsicSettingsResponse | undefined>;
/** | * The etag of the manager.
*/
public readonly etag!: pulumi.Output<string | undefined>;
/**
* The geo location of the resource.
*/
public readonly location!: pulumi.Output<string>;
/**
* The resource name.
*/
public /*out*/ readonly name!: pulumi.Output<string>;
/**
* Specifies the state of the resource as it is getting provisioned. Value of "Succeeded" means the Manager was successfully created.
*/
public readonly provisioningState!: pulumi.Output<string | undefined>;
/**
* Specifies the Sku.
*/
public readonly sku!: pulumi.Output<outputs.storsimple.v20170601.ManagerSkuResponse | undefined>;
/**
* The tags attached to the resource.
*/
public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
/**
* The resource type.
*/
public /*out*/ readonly type!: pulumi.Output<string>;
/**
* Create a Manager resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: ManagerArgs, opts?: pulumi.CustomResourceOptions) {
let inputs: pulumi.Inputs = {};
if (!(opts && opts.id)) {
if (!args || args.location === undefined) {
throw new Error("Missing required property 'location'");
}
if (!args || args.managerName === undefined) {
throw new Error("Missing required property 'managerName'");
}
if (!args || args.resourceGroupName === undefined) {
throw new Error("Missing required property 'resourceGroupName'");
}
inputs["cisIntrinsicSettings"] = args ? args.cisIntrinsicSettings : undefined;
inputs["etag"] = args ? args.etag : undefined;
inputs["location"] = args ? args.location : undefined;
inputs["managerName"] = args ? args.managerName : undefined;
inputs["provisioningState"] = args ? args.provisioningState : undefined;
inputs["resourceGroupName"] = args ? args.resourceGroupName : undefined;
inputs["sku"] = args ? args.sku : undefined;
inputs["tags"] = args ? args.tags : undefined;
inputs["name"] = undefined /*out*/;
inputs["type"] = undefined /*out*/;
} else {
inputs["cisIntrinsicSettings"] = undefined /*out*/;
inputs["etag"] = undefined /*out*/;
inputs["location"] = undefined /*out*/;
inputs["name"] = undefined /*out*/;
inputs["provisioningState"] = undefined /*out*/;
inputs["sku"] = undefined /*out*/;
inputs["tags"] = undefined /*out*/;
inputs["type"] = undefined /*out*/;
}
if (!opts) {
opts = {}
}
if (!opts.version) {
opts.version = utilities.getVersion();
}
const aliasOpts = { aliases: [{ type: "azure-nextgen:storsimple/latest:Manager" }, { type: "azure-nextgen:storsimple/v20161001:Manager" }] };
opts = opts ? pulumi.mergeOptions(opts, aliasOpts) : aliasOpts;
super(Manager.__pulumiType, name, inputs, opts);
}
}
/**
* The set of arguments for constructing a Manager resource.
*/
export interface ManagerArgs {
/**
* Represents the type of StorSimple Manager.
*/
readonly cisIntrinsicSettings?: pulumi.Input<inputs.storsimple.v20170601.ManagerIntrinsicSettings>;
/**
* The etag of the manager.
*/
readonly etag?: pulumi.Input<string>;
/**
* The geo location of the resource.
*/
readonly location: pulumi.Input<string>;
/**
* The manager name
*/
readonly managerName: pulumi.Input<string>;
/**
* Specifies the state of the resource as it is getting provisioned. Value of "Succeeded" means the Manager was successfully created.
*/
readonly provisioningState?: pulumi.Input<string>;
/**
* The resource group name
*/
readonly resourceGroupName: pulumi.Input<string>;
/**
* Specifies the Sku.
*/
readonly sku?: pulumi.Input<inputs.storsimple.v20170601.ManagerSku>;
/**
* The tags attached to the resource.
*/
readonly tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
} | |
main.go | package main
import (
"flag"
"fmt"
"github.com/pierrec/lz4/internal/cmdflag"
"github.com/pierrec/lz4/internal/cmds"
)
func init() |
func main() {
flag.CommandLine.Bool(cmdflag.VersionBoolFlag, false, "print the program version")
err := cmdflag.Parse()
if err != nil {
fmt.Println(err)
return
}
}
| {
const onError = flag.ExitOnError
cmdflag.New(
"compress", "[arguments] [<file name> ...]",
"Compress the given files or from stdin to stdout.",
onError, cmds.Compress)
cmdflag.New(
"uncompress", "[arguments] [<file name> ...]",
"Uncompress the given files or from stdin to stdout.",
onError, cmds.Uncompress)
} |
index.tsx | import LogInContainer from "./LogInContainer"; |
export default LogInContainer; |
|
0007_auto_20210121_2155.py | # Generated by Django 3.1.3 on 2021-01-21 21:55
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('server', '0006_auto_20210120_2320'),
]
operations = [
migrations.AddField(
model_name='sensor',
name='lux_max',
field=models.DecimalField(blank=True, decimal_places=1, max_digits=3, null=True),
),
migrations.AddField(
model_name='sensor',
name='temp_max',
field=models.DecimalField(blank=True, decimal_places=1, max_digits=3, null=True),
),
] |
|
s2-layer-icon.js | // Copyright (c) 2022 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import React, {Component} from 'react';
import PropTypes from 'prop-types';
import Base from 'components/common/icons/base';
class | extends Component {
static propTypes = {
/** Set the height of the icon, ex. '16px' */
height: PropTypes.string,
colors: PropTypes.arrayOf(PropTypes.string)
};
static defaultProps = {
height: '18px',
predefinedClassName: 's2-layer-icon'
};
render() {
return (
<Base {...this.props}>
<path d="M14.76,58,15,20.54,50.06,6.75V44Zm4-34.86L18.6,52.38l27.66-11V12.33Z" />
<path d="M27.21,38.58a7.38,7.38,0,0,1-3.62-.9l.3-2a6.49,6.49,0,0,0,3.3.9c1.49,0,2.18-.59,2.18-1.63,0-2.26-5.71-1.28-5.71-5.3,0-2,1.26-3.54,4.16-3.54a8.38,8.38,0,0,1,3.28.64l-.29,1.9a8.41,8.41,0,0,0-2.88-.54c-1.63,0-2.14.66-2.14,1.42,0,2.19,5.71,1.18,5.71,5.27C31.5,37.16,29.93,38.58,27.21,38.58Z" />
<path d="M36.17,36.36v0h5.06l0,2H33.32V36.9c3-2.88,5.67-5.09,5.67-7,0-1-.64-1.67-2.19-1.67a5,5,0,0,0-3,1.1l-.53-1.79a6.31,6.31,0,0,1,3.91-1.28c2.66,0,4,1.34,4,3.41C41.21,31.94,39.13,33.89,36.17,36.36Z" />
</Base>
);
}
}
export default S2LayerIcon;
| S2LayerIcon |
bounds.rs | use aries_model::bounds::Lit;
use aries_model::Model;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use rand::prelude::SliceRandom;
use rand::rngs::StdRng;
use rand::SeedableRng;
#[inline]
fn entailment(xs: &[Lit], ys: &[Lit]) -> u64 {
let mut count = 0;
for &x in xs {
for &y in ys {
if x.entails(y) {
count += 1
}
}
} | }
pub fn criterion_benchmark(c: &mut Criterion) {
let mut rng = StdRng::seed_from_u64(2398248538438434234);
let mut model = Model::new();
let mut bounds = Vec::new();
for _ in 0..50 {
let var = model.new_ivar(0, 100, "");
for v in -20..20 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-many-vars", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
let mut bounds = Vec::new();
for _ in 0..5 {
let var = model.new_ivar(0, 100, "");
for v in -20..20 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-few-vars", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
let mut bounds = Vec::new();
let var = model.new_ivar(0, 100, "");
for v in -40..40 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-one-var", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches); | count |
util.py | import tempfile
import json
import os
import logging
from six import itervalues, iterlists
import connexion
from werkzeug.utils import secure_filename
def visit(d, op):
"""Recursively call op(d) for all list subelements and dictionary 'values' that d may have."""
op(d)
if isinstance(d, list):
for i in d:
visit(i, op)
elif isinstance(d, dict):
for i in itervalues(d):
visit(i, op)
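# A minimal sketch of visit() on a nested document (values are hypothetical):
#
#     seen = []
#     visit({"a": [1, {"b": 2}]}, seen.append)
#     # seen now holds the outer dict, the list, 1, the inner dict, and 2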
class WESBackend(object):
"""Stores and retrieves options. Intended to be inherited."""
def __init__(self, opts):
"""Parse and store options as a list of tuples."""
self.pairs = []
for o in opts if opts else []:
k, v = o.split("=", 1)
self.pairs.append((k, v))
def getopt(self, p, default=None):
"""Returns the first option value stored that matches p or default."""
for k, v in self.pairs:
if k == p:
return v
return default
def getoptlist(self, p):
"""Returns all option values stored that match p as a list."""
optlist = []
for k, v in self.pairs:
if k == p:
optlist.append(v)
return optlist
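    # Sketch of the option helpers above (option strings are hypothetical):
    #
    #     b = WESBackend(["runner=cwltool", "extra=-v", "extra=-q"])
    #     b.getopt("runner")      # -> "cwltool"
    #     b.getoptlist("extra")   # -> ["-v", "-q"]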
def log_for_run(self, run_id, message):
logging.info("Workflow %s: %s", run_id, message)
def collect_attachments(self, run_id=None):
tempdir = tempfile.mkdtemp()
body = {}
has_attachments = False
for k, ls in iterlists(connexion.request.files):
try:
for v in ls:
if k == "workflow_attachment":
sp = v.filename.split("/")
fn = []
for p in sp:
if p not in ("", ".", ".."):
fn.append(secure_filename(p))
dest = os.path.join(tempdir, *fn)
if not os.path.isdir(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
self.log_for_run(run_id, "Staging attachment '%s' to '%s'" % (v.filename, dest))
v.save(dest)
has_attachments = True
body[k] = "file://%s" % tempdir # Reference to temp working dir.
elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
content = v.read()
body[k] = json.loads(content.decode("utf-8"))
else:
|
except Exception as e:
raise ValueError("Error reading parameter '%s': %s" % (k, e))
for k, ls in iterlists(connexion.request.form):
try:
for v in ls:
if not v:
continue
if k in ("workflow_params", "tags", "workflow_engine_parameters"):
body[k] = json.loads(v)
else:
body[k] = v
except Exception as e:
raise ValueError("Error reading parameter '%s': %s" % (k, e))
if "workflow_url" in body:
if ":" not in body["workflow_url"]:
if not has_attachments:
raise ValueError("Relative 'workflow_url' but missing 'workflow_attachment'")
body["workflow_url"] = "file://%s" % os.path.join(tempdir, secure_filename(body["workflow_url"]))
self.log_for_run(run_id, "Using workflow_url '%s'" % body.get("workflow_url"))
else:
raise ValueError("Missing 'workflow_url' in submission")
if "workflow_params" not in body:
raise ValueError("Missing 'workflow_params' in submission")
return tempdir, body
| body[k] = v.read().decode() |
0012_auto_20181008_1832.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-10-08 15:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('gram', '0011_auto_20181008_1505'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='profilepic',
field=models.ImageField(blank=True, upload_to='picture/'),
),
] |
|
index.js | 'use strict';
var BarracksMessenger = require('../src/index').BarracksMessenger;
var fs = require('fs');
var args = {};
process.argv.forEach(function (val, index) {
if (index >= 2 && index % 2 === 0) {
var argName = val.substr(2);
args[argName] = process.argv[index + 1];
} |
var barracksBaseUrl = args.baseUrl;
var barracksMqttEndpoint = args.mqttEndpoint;
var barracksApiKey = args.apiKey;
var barracksUnitId = args.unitId;
function usage() {
  console.log('You can also use the arguments --baseUrl <BARRACKS_URL> and --mqttEndpoint <MQTT_ENDPOINT> if you want to target a domain other than the default one.');
process.exit();
}
if (!barracksApiKey) {
console.log('Argument --apiKey <API_KEY> is mandatory.');
console.log('<API_KEY> is your user api key that you can find on the Account page of Barracks.');
usage();
}
if (!barracksUnitId) {
console.log('Argument --unitId <UNIT_ID> is mandatory.');
console.log('<UNIT_ID> is your device\'s id');
usage();
}
var barracksMessenger = new BarracksMessenger({
baseURL: barracksBaseUrl,
mqttEndpoint: barracksMqttEndpoint,
unitId: barracksUnitId,
apiKey: barracksApiKey
});
function listenMessages() {
barracksMessenger.connect({
onConnect: function () {
console.log('Connected to ' + barracksMqttEndpoint);
},
onError: function (err) {
console.log('Error occurred : ' + err);
},
onClose: function () {
console.log('Connection closed');
},
onReconnect: function () {
console.log('Attempting to reconnect...');
}
});
barracksMessenger.subscribe(function (messageReceived) {
console.log('Received: ' + messageReceived.payload);
    console.log('retain: ' + messageReceived.retained);
    console.log('topic: ' + messageReceived.topic);
    console.log('length: ' + messageReceived.length);
    console.log('qos: ' + messageReceived.qos);
}, { qos: 1 });
setTimeout(function () {
barracksMessenger.end();
}, 120000);
}
listenMessages(); | }); |
test_numa.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import objects
from nova.tests.unit.objects import test_objects
fake_obj_numa = objects.NUMATopology(
cells=[
objects.NUMACell(
id=0, cpuset=set([1, 2]), memory=512,
cpu_usage=2, memory_usage=256),
objects.NUMACell(
id=1, cpuset=set([3, 4]), memory=512,
cpu_usage=1, memory_usage=128)])
class _TestNUMA(object):
def test_convert_wipe(self):
d1 = fake_obj_numa._to_dict()
d2 = objects.NUMATopology.obj_from_primitive(d1)._to_dict()
self.assertEqual(d1, d2)
def test_pinning_logic(self):
|
class TestNUMA(test_objects._LocalTest,
_TestNUMA):
pass
class TestNUMARemote(test_objects._RemoteTest,
_TestNUMA):
pass
| obj = objects.NUMATopology(cells=[
objects.NUMACell(
id=0, cpuset=set([1, 2]), memory=512,
cpu_usage=2, memory_usage=256,
pinned_cpus=set([1])),
objects.NUMACell(
id=1, cpuset=set([3, 4]), memory=512,
cpu_usage=1, memory_usage=128,
pinned_cpus=set([]))
]
)
self.assertEqual(set([2]), obj.cells[0].free_cpus)
self.assertEqual(set([3, 4]), obj.cells[1].free_cpus) |
CaseReport.js | import React from 'react'
import PropTypes from 'prop-types'
import { Row, Col, Tabs } from 'antd'
const TabPane = Tabs.TabPane;
function | (json) {
return JSON.stringify(json, null, ' ')
}
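// For instance, json_format({a: 1}) returns '{\n "a": 1\n}' (one-space indent).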
const CaseReport = function (props) {
let body = json_format(props.body);
  let headers = json_format(props.headers);
  let res_header = json_format(props.res_header);
let res_body = json_format(props.res_body);
let validRes;
if (props.validRes && Array.isArray(props.validRes)) {
validRes = props.validRes.map((item, index) => {
return <div key={index}>{item.message}</div>
})
}
return <div className="report">
<Tabs defaultActiveKey="request" >
<TabPane className="case-report-pane" tab="Request" key="request">
<Row className="case-report">
<Col className="case-report-title" span="6">Url</Col>
<Col span="18">{props.url}</Col>
</Row>
{props.query ?
<Row className="case-report">
<Col className="case-report-title" span="6">Query</Col>
<Col span="18">{props.query}</Col>
</Row>
: null
}
{props.headers ?
<Row className="case-report">
<Col className="case-report-title" span="6">Headers</Col>
<Col span="18"><pre>{headers}</pre></Col>
</Row>
: null
}
{props.body ?
<Row className="case-report">
<Col className="case-report-title" span="6">Body</Col>
<Col span="18"><pre style={{whiteSpace: 'pre-wrap'}}>{body}</pre></Col>
</Row>
: null
}
</TabPane>
<TabPane className="case-report-pane" tab="Response" key="response">
{props.res_header ?
<Row className="case-report">
<Col className="case-report-title" span="6">Headers</Col>
<Col span="18"><pre>{res_header}</pre></Col>
</Row>
: null
}
{props.res_body ?
<Row className="case-report">
<Col className="case-report-title" span="6">Body</Col>
<Col span="18"><pre>{res_body}</pre></Col>
</Row>
: null
}
</TabPane>
      <TabPane className="case-report-pane" tab="Validation Result" key="valid">
{props.validRes ?
<Row className="case-report">
          <Col className="case-report-title" span="6">Validation Result</Col>
<Col span="18">
{ validRes }
</Col>
</Row>
: null
}
</TabPane>
</Tabs>
</div>
}
CaseReport.propTypes = {
url: PropTypes.string,
body: PropTypes.any,
headers: PropTypes.object,
res_header: PropTypes.object,
res_body: PropTypes.any,
query: PropTypes.string,
validRes: PropTypes.array
}
export default CaseReport; | json_format |
application.rs | //! CargoMove Abscissa Application
use crate::{commands::CargoMoveCmd, config::CargoMoveConfig};
use abscissa_core::{
application, config, logging, Application, EntryPoint, FrameworkError, StandardPaths,
};
use lazy_static::lazy_static;
lazy_static! {
/// Application state
pub static ref APPLICATION: application::Lock<CargoMoveApp> = application::Lock::default();
}
/// Obtain a read-only (multi-reader) lock on the application state.
///
/// Panics if the application state has not been initialized.
pub fn app_reader() -> application::lock::Reader<CargoMoveApp> {
APPLICATION.read()
}
/// Obtain an exclusive mutable lock on the application state.
pub fn app_writer() -> application::lock::Writer<CargoMoveApp> {
APPLICATION.write()
} | pub fn app_config() -> config::Reader<CargoMoveApp> {
config::Reader::new(&APPLICATION)
}
/// CargoMove Application
#[derive(Debug)]
pub struct CargoMoveApp {
/// Application configuration.
config: Option<CargoMoveConfig>,
/// Application state.
state: application::State<Self>,
}
/// Initialize a new application instance.
///
/// By default no configuration is loaded, and the framework state is
/// initialized to a default, empty state (no components, threads, etc).
impl Default for CargoMoveApp {
fn default() -> Self {
Self {
config: None,
state: application::State::default(),
}
}
}
impl Application for CargoMoveApp {
/// Entrypoint command for this application.
type Cmd = EntryPoint<CargoMoveCmd>;
/// Application configuration.
type Cfg = CargoMoveConfig;
/// Paths to resources within the application.
type Paths = StandardPaths;
/// Accessor for application configuration.
fn config(&self) -> &CargoMoveConfig {
self.config.as_ref().expect("config not loaded")
}
/// Borrow the application state immutably.
fn state(&self) -> &application::State<Self> {
&self.state
}
/// Borrow the application state mutably.
fn state_mut(&mut self) -> &mut application::State<Self> {
&mut self.state
}
/// Register all components used by this application.
///
/// If you would like to add additional components to your application
/// beyond the default ones provided by the framework, this is the place
/// to do so.
fn register_components(&mut self, command: &Self::Cmd) -> Result<(), FrameworkError> {
let components = self.framework_components(command)?;
self.state.components.register(components)
}
/// Post-configuration lifecycle callback.
///
/// Called regardless of whether config is loaded to indicate this is the
/// time in app lifecycle when configuration would be loaded if
/// possible.
fn after_config(&mut self, config: Self::Cfg) -> Result<(), FrameworkError> {
// Configure components
self.state.components.after_config(&config)?;
self.config = Some(config);
Ok(())
}
/// Get logging configuration from command-line options
fn logging_config(&self, command: &EntryPoint<CargoMoveCmd>) -> logging::Config {
if command.verbose {
logging::Config::verbose()
} else {
logging::Config::default()
}
}
} |
/// Obtain a read-only (multi-reader) lock on the application configuration.
///
/// Panics if the application configuration has not been loaded. |
init_test.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubeadm
import (
"testing"
"github.com/renstrom/dedent"
)
func runKubeadmInit(args ...string) (string, string, error) {
kubeadmPath := getKubeadmPath()
kubeadmArgs := []string{"init", "--dry-run", "--ignore-preflight-errors=all"}
kubeadmArgs = append(kubeadmArgs, args...)
return RunCmd(kubeadmPath, kubeadmArgs...)
}
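// Sketch of a call (the token value is illustrative):
// runKubeadmInit("--token=abcdef.0123456789abcdef") runs
// `kubeadm init --dry-run --ignore-preflight-errors=all --token=abcdef.0123456789abcdef`
// and returns the command's stdout, stderr, and error.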
func TestCmdInitToken(t *testing.T) {
if *kubeadmCmdSkip {
t.Log("kubeadm cmd tests being skipped")
t.Skip()
}
initTest := []struct {
name string
args string
expected bool
}{
/*
{
name: "invalid token size",
args: "--token=abcd:1234567890abcd",
expected: false,
},
{
name: "invalid token non-lowercase",
args: "--token=Abcdef:1234567890abcdef",
expected: false,
},
*/
{
name: "valid token is accepted",
args: "--token=abcdef.0123456789abcdef",
expected: true,
},
}
for _, rt := range initTest {
t.Run(rt.name, func(t *testing.T) {
_, _, err := runKubeadmInit(rt.args)
if (err == nil) != rt.expected {
t.Fatalf(dedent.Dedent(`
CmdInitToken test case %q failed with an error: %v
command 'kubeadm init %s'
expected: %t
err: %t
`),
rt.name,
err,
rt.args,
rt.expected,
(err == nil),
)
}
})
}
}
func | (t *testing.T) {
if *kubeadmCmdSkip {
t.Log("kubeadm cmd tests being skipped")
t.Skip()
}
initTest := []struct {
name string
args string
expected bool
}{
{
name: "invalid semantic version string is detected",
args: "--kubernetes-version=v1.1",
expected: false,
},
{
name: "valid version is accepted",
args: "--kubernetes-version=1.11.0",
expected: true,
},
}
for _, rt := range initTest {
t.Run(rt.name, func(t *testing.T) {
_, _, err := runKubeadmInit(rt.args)
if (err == nil) != rt.expected {
t.Fatalf(dedent.Dedent(`
CmdInitKubernetesVersion test case %q failed with an error: %v
command 'kubeadm init %s'
expected: %t
err: %t
`),
rt.name,
err,
rt.args,
rt.expected,
(err == nil),
)
}
})
}
}
func TestCmdInitConfig(t *testing.T) {
if *kubeadmCmdSkip {
t.Log("kubeadm cmd tests being skipped")
t.Skip()
}
initTest := []struct {
name string
args string
expected bool
}{
{
name: "fail on non existing path",
args: "--config=/does/not/exist/foo/bar",
expected: false,
},
{
name: "can't load v1alpha1 config",
args: "--config=testdata/init/v1alpha1.yaml",
expected: false,
},
{
name: "can't load v1alpha2 config",
args: "--config=testdata/init/v1alpha2.yaml",
expected: false,
},
{
name: "can load v1alpha3 config",
args: "--config=testdata/init/v1alpha3.yaml",
expected: true,
},
{
name: "can load v1beta1 config",
args: "--config=testdata/init/v1beta1.yaml",
expected: true,
},
{
name: "don't allow mixed arguments v1alpha3",
args: "--kubernetes-version=1.11.0 --config=testdata/init/v1alpha3.yaml",
expected: false,
},
{
name: "don't allow mixed arguments v1beta1",
args: "--kubernetes-version=1.11.0 --config=testdata/init/v1beta1.yaml",
expected: false,
},
}
for _, rt := range initTest {
t.Run(rt.name, func(t *testing.T) {
_, _, err := runKubeadmInit(rt.args)
if (err == nil) != rt.expected {
t.Fatalf(dedent.Dedent(`
CmdInitConfig test case %q failed with an error: %v
command 'kubeadm init %s'
expected: %t
err: %t
`),
rt.name,
err,
rt.args,
rt.expected,
(err == nil),
)
}
})
}
}
func TestCmdInitAPIPort(t *testing.T) {
if *kubeadmCmdSkip {
t.Log("kubeadm cmd tests being skipped")
t.Skip()
}
initTest := []struct {
name string
args string
expected bool
}{
{
name: "fail on non-string port",
args: "--apiserver-bind-port=foobar",
expected: false,
},
{
name: "fail on too large port number",
args: "--apiserver-bind-port=100000",
expected: false,
},
{
name: "fail on negative port number",
args: "--apiserver-bind-port=-6000",
expected: false,
},
{
name: "accept a valid port number",
args: "--apiserver-bind-port=6000",
expected: true,
},
}
for _, rt := range initTest {
t.Run(rt.name, func(t *testing.T) {
_, _, err := runKubeadmInit(rt.args)
if (err == nil) != rt.expected {
t.Fatalf(dedent.Dedent(`
CmdInitAPIPort test case %q failed with an error: %v
command 'kubeadm init %s'
expected: %t
err: %t
`),
rt.name,
err,
rt.args,
rt.expected,
(err == nil),
)
}
})
}
}
| TestCmdInitKubernetesVersion |
main.rs | use std::{thread, time};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use ctrlc;
use rust_gpiozero::LED;
fn main() {
let keep_going = Arc::new(AtomicBool::new(true));
let kg = keep_going.clone();
let red = LED::new(9);
let amber = LED::new(10);
let green = LED::new(11);
ctrlc::set_handler(move || {
kg.store(false, Ordering::SeqCst);
}).unwrap();
red.off();
amber.off();
green.off();
while keep_going.load(Ordering::SeqCst) {
// Red
red.on();
thread::sleep(time::Duration::from_secs(3));
// Red and Yellow
amber.on();
thread::sleep(time::Duration::from_secs(1));
| // Green
red.off();
amber.off();
green.on();
thread::sleep(time::Duration::from_secs(5));
// Yellow
green.off();
amber.on();
thread::sleep(time::Duration::from_secs(2));
// Yellow off
amber.off();
}
red.off();
amber.off();
green.off();
} | |
geo.rs | use util::rand;
use vec3::Vec3;
use ray::Ray;
use aabb::AABB;
use material::Material;
use std::cmp::Ordering::Equal;
#[derive(Debug, Clone)]
pub struct Intersection {
    /// The distance along the ray at which the intersection occurred
pub t: f64,
/// The intersecting ray
pub ray: Ray,
/// The point of intersection
pub point: Vec3,
/// A normal off the intersection
pub normal: Vec3,
/// The material of the surface
pub material: Material,
}
impl Intersection {
/// Create a new Intersection instance
///
/// # Arguments
    /// * `t` - the distance along the ray at which the intersection occurred
    /// * `ray` - the intersecting ray
    /// * `normal` - a normal off the intersection
    /// * `mat` - the material of the surface
///
pub fn new(t: f64, ray: Ray, normal: Vec3, mat: Material) -> Self {
Intersection {
t: t,
ray: ray,
point: ray.at(t),
normal: normal.unit(),
material: mat,
}
}
/// Returns the vector reflected by the ray off the normal
pub fn reflected(&self) -> Vec3 {
Vec3::reflect(self.ray.dir, self.normal)
}
}
#[derive(Debug, Clone)]
pub enum Geo {
/// A geometry representing a set of geometry
List {
children: Vec<Geo>,
},
/// A geometry representing a BVH
#[allow(non_camel_case_types)]
BVH_node {
bbox: AABB,
left: Box<Geo>,
right: Box<Geo>,
},
/// A geometry representing a sphere
Sphere {
center0: Vec3,
center1: Vec3,
radius: f64,
time0: f64,
time1: f64,
material: Material,
},
}
impl Geo {
/// Construct a new geometry list
pub fn list(children: Vec<Geo>) -> Self {
Geo::List {
children: children,
}
}
/// Construct a new BVH node
pub fn bvh_node(mut children: Vec<Geo>) -> Self {
// Comparison function generator
let compare = |i: u32| {
move |a: &Geo, b: &Geo| {
match (a.bounding_box(), b.bounding_box()) {
(
AABB::BBox { min: min1, .. },
AABB::BBox { min: min2, .. },
) => min1[i].partial_cmp(&min2[i]).unwrap_or(Equal),
_ => Equal,
}
}
};
// Sort children
let axis = (3.0 * rand()) as u32;
children.sort_by(compare(axis));
// Determine left and right nodes
let n = children.len();
let (left, right) = match n {
// Special cases, return leaf
0 => return Geo::list(Vec::new()),
1 => return children.remove(0),
// Left and right are leaves
2 => (children.remove(0), children.remove(0)),
// Child nodes
_ =>{
let mut chunks = children.chunks_mut(n / 2 + 1);
let left = Geo::bvh_node(chunks.next().unwrap().to_vec());
let right = Geo::bvh_node(chunks.next().unwrap().to_vec());
(left, right)
},
};
Geo::BVH_node {
bbox: AABB::bound_boxes(
left.bounding_box(),
right.bounding_box(),
),
left: Box::new(left),
right: Box::new(right),
}
}
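    // Sketch of the construction above: bvh_node(vec![s1, s2, s3]) sorts the
    // children along a randomly chosen axis, splits the list in half, and
    // recurses; each node's bbox bounds both subtrees, so intersects() can
    // prune whole branches with a single AABB test.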
/// Construct a new sphere
pub fn sphere(center: Vec3, radius: f64, mat: Material) -> Self {
Geo::Sphere {
center0: center,
center1: center,
radius: radius,
time0: 0.0,
time1: 1.0,
material: mat,
}
}
/// Construct a new moving sphere
pub fn moving_sphere(c0: Vec3, c1: Vec3, r: f64, t0: f64, t1: f64, mat: Material) -> Self {
Geo::Sphere {
center0: c0,
center1: c1,
radius: r,
time0: t0,
time1: t1,
material: mat,
}
}
/// Find the center of a sphere at a given time
pub fn | (&self, time: f64) -> Vec3 {
match self {
&Geo::Sphere { center0, center1, time0, time1, .. } => center0
+ ((time - time0) / (time1 - time0))
* (center1 - center0),
            other => panic!("{:?} does not have a center!", other),
}
}
/// Given a ray, a piece of geometry, and bounds on the ray determines if the
/// ray and geometry intersect. Returns an Option of Intersection which will be
/// Some if the intersection occured, and None if there was no intersection.
///
/// # Arguments
/// * `ray` - the ray
/// * `geo` - the geometry
    /// * `min` - the minimum distance along the ray
/// * `max` - the maximum distance along the ray
///
pub fn intersects(&self, ray: Ray, min: f64, max: f64) -> Option<Intersection> {
match self {
// Handle intersection for a list
&Geo::List { ref children } => {
let mut closest = None;
let mut cmax = max;
for geo in children {
if let Some(i) = geo.intersects(ray, min, cmax) {
cmax = i.t;
closest = Some(i);
}
}
closest
},
// Handle intersection for a BVH
&Geo::BVH_node { ref bbox, ref left, ref right, .. } => {
if bbox.hit(ray, min, max) {
match (
left.intersects(ray, min, max),
right.intersects(ray, min, max),
) {
(Some(ileft), Some(iright)) => {
if ileft.t < iright.t {
Some(ileft)
} else {
Some(iright)
}
},
// Pass through the results
(Some(ileft), _) => Some(ileft),
(_, Some(iright)) => Some(iright),
_ => None,
}
} else {
None
}
},
// Handle intersection for a sphere
&Geo::Sphere { radius, ref material, .. } => {
let center = self.center(ray.time);
let intersect = |t| Some(Intersection::new(
t, ray,
(ray.at(t) - center) / radius,
material.clone()
));
let oc = ray.origin - center;
let a = Vec3::dot(ray.dir, ray.dir);
let b = Vec3::dot(oc, ray.dir);
let c = Vec3::dot(oc, oc) - radius * radius;
let d = b * b - a * c;
if d > 0.0 {
let t = (-b - d.sqrt()) / a;
if min < t && t < max {
return intersect(t);
}
let t = (-b + d.sqrt()) / a;
if min < t && t < max {
return intersect(t);
}
}
None
}
}
}
/// Return the bounding box of a piece of geometry
pub fn bounding_box(&self) -> AABB {
match self {
// Determine the bounding box for a list
&Geo::List { ref children } => {
let mut bbox = AABB::None;
for geo in children {
bbox = AABB::bound_boxes(bbox, geo.bounding_box());
}
bbox
},
// Determine the bounding box for a BVH (trivial)
&Geo::BVH_node { bbox, .. } => { bbox },
// Determine the bounding box for a sphere
&Geo::Sphere { center0, center1, radius, .. } => {
let vradius = Vec3::ones() * radius;
let box1 = AABB::new(center0 - vradius, center0 + vradius);
let box2 = AABB::new(center1 - vradius, center1 + vradius);
AABB::bound_boxes(box1, box2)
},
}
}
}
| center |
disk_cache.go | package keeper
import (
"sort"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/omexapp/omexchain/x/order/types"
)
// OrderIDsMap stores orderIDSlice with map.
// <product:price:side> -> <orderIDs>
type OrderIDsMap struct {
Data map[string][]string
updatedItems map[string]struct{}
}
// DepthBookMap stores depthBook with map.
// <product> -> <depthBook>
type DepthBookMap struct {
data map[string]*types.DepthBook
updatedItems map[string]struct{}
newItems map[string]struct{}
}
// DiskCache stores cache that will persist to disk at endBlock.
type DiskCache struct {
depthBookMap *DepthBookMap
orderIDsMap *OrderIDsMap
priceMap map[string]sdk.Dec
storeOrderNum int64 // current stored order num
openNum int64 // current open orders num
closedOrderIDs []string
}
func | () *DiskCache {
return &DiskCache{
closedOrderIDs: []string{},
orderIDsMap: &OrderIDsMap{make(map[string][]string), make(map[string]struct{})},
priceMap: make(map[string]sdk.Dec),
depthBookMap: &DepthBookMap{make(map[string]*types.DepthBook), make(map[string]struct{}),
make(map[string]struct{})},
}
}
// reset is invoked in begin block
func (c *DiskCache) reset() {
c.closedOrderIDs = []string{}
c.orderIDsMap.updatedItems = make(map[string]struct{})
c.depthBookMap.updatedItems = make(map[string]struct{})
c.depthBookMap.newItems = make(map[string]struct{})
}
// nolint
func (c *DiskCache) GetClosedOrderIDs() []string {
return c.closedOrderIDs
}
func (c *DiskCache) setLastPrice(product string, price sdk.Dec) {
c.priceMap[product] = price
}
func (c *DiskCache) getLastPrice(product string) sdk.Dec {
if price, ok := c.priceMap[product]; ok {
return price
}
return sdk.ZeroDec()
}
// GetOrderIDsMapCopy returns a new copy of OrderIDsMap
func (c *DiskCache) GetOrderIDsMapCopy() *OrderIDsMap {
if c.orderIDsMap == nil {
return nil
}
ret := make(map[string][]string)
for k, v := range c.orderIDsMap.Data {
if len(v) == 0 {
ret[k] = []string{}
}
ret[k] = append(ret[k], v...)
}
return &OrderIDsMap{Data: ret}
}
func (c *DiskCache) getOrderIDs(key string) []string {
return c.orderIDsMap.Data[key]
}
func (c *DiskCache) setStoreOrderNum(num int64) {
c.storeOrderNum = num
}
// nolint
func (c *DiskCache) DecreaseStoreOrderNum(num int64) int64 {
c.storeOrderNum -= num
return c.storeOrderNum
}
func (c *DiskCache) setOpenNum(num int64) {
c.openNum = num
}
func (c *DiskCache) getOpenNum() int64 {
return c.openNum
}
func (c *DiskCache) addOrderIDs(key string, orderIDs []string) {
c.orderIDsMap.Data[key] = orderIDs
}
func (c *DiskCache) addDepthBook(product string, book *types.DepthBook) {
c.depthBookMap.data[product] = book
}
// setOrderIDs updates or removes unfilled order ids
func (c *DiskCache) setOrderIDs(key string, orderIDs []string) {
if len(orderIDs) == 0 {
// remove empty element immediately, not do it by the end of endblock
delete(c.orderIDsMap.Data, key)
} else {
c.orderIDsMap.Data[key] = orderIDs
}
c.orderIDsMap.updatedItems[key] = struct{}{}
}
// setDepthBook updates or removes a depth book
func (c *DiskCache) setDepthBook(product string, book *types.DepthBook) {
if book != nil && len(book.Items) > 0 {
c.depthBookMap.data[product] = book
} else {
delete(c.depthBookMap.data, product)
}
c.depthBookMap.updatedItems[product] = struct{}{}
}
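// For example, setDepthBook(product, book) keeps the book while it still has
// items and deletes it otherwise; either way the product is marked updated so
// the change is persisted to disk at endBlock.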
// GetUpdatedOrderIDKeys returns the sorted keys of the order ID lists updated in this block
// nolint
func (c *DiskCache) GetUpdatedOrderIDKeys() []string {
updatedKeys := make([]string, 0, len(c.orderIDsMap.updatedItems))
for key := range c.orderIDsMap.updatedItems {
updatedKeys = append(updatedKeys, key)
}
sort.Strings(updatedKeys)
return updatedKeys
}
func (c *DiskCache) getDepthBook(product string) *types.DepthBook {
res := c.depthBookMap.data[product]
return res
}
func (c *DiskCache) getProductsFromDepthBookMap() []string {
products := make([]string, 0, len(c.depthBookMap.data))
for product := range c.depthBookMap.data {
products = append(products, product)
}
return products
}
// GetUpdatedDepthbookKeys returns a new copy of UpdatedDepthbookKeys
func (c *DiskCache) GetUpdatedDepthbookKeys() []string {
updatedKeys := make([]string, 0, len(c.depthBookMap.updatedItems))
for key := range c.depthBookMap.updatedItems {
updatedKeys = append(updatedKeys, key)
}
sort.Strings(updatedKeys)
return updatedKeys
}
// GetNewDepthbookKeys returns a new copy of NewDepthbookKeys
func (c *DiskCache) GetNewDepthbookKeys() []string {
newAddKeys := make([]string, 0, len(c.depthBookMap.newItems))
for key := range c.depthBookMap.newItems {
newAddKeys = append(newAddKeys, key)
}
return newAddKeys
}
// insertOrder inserts a new order into orderIDsMap
func (c *DiskCache) insertOrder(order *types.Order) {
// 1. update depthBookMap
depthBook, ok := c.depthBookMap.data[order.Product]
if !ok {
depthBook = &types.DepthBook{}
c.depthBookMap.data[order.Product] = depthBook
}
depthBook.InsertOrder(order)
c.depthBookMap.updatedItems[order.Product] = struct{}{}
c.depthBookMap.newItems[order.Product] = struct{}{}
// 2. update orderIDsMap
orderIDsMap := c.orderIDsMap
key := types.FormatOrderIDsKey(order.Product, order.Price, order.Side)
orderIDs, ok := orderIDsMap.Data[key]
if !ok {
orderIDs = []string{}
}
orderIDs = append(orderIDs, order.OrderID)
orderIDsMap.Data[key] = orderIDs
c.orderIDsMap.updatedItems[key] = struct{}{}
c.openNum++
c.storeOrderNum++
}
func (c *DiskCache) closeOrder(orderID string) {
c.closedOrderIDs = append(c.closedOrderIDs, orderID)
c.openNum--
}
// remove an order from orderIDsMap when order cancelled/expired
func (c *DiskCache) removeOrder(order *types.Order) {
// update depth book map
depthBook := c.getDepthBook(order.Product)
if depthBook != nil {
depthBook.RemoveOrder(order)
c.setDepthBook(order.Product, depthBook)
}
// update order id map
orderIDsMap := c.orderIDsMap
key := types.FormatOrderIDsKey(order.Product, order.Price, order.Side)
orderIDs := orderIDsMap.Data[key]
orderIDsLen := len(orderIDs)
for i := 0; i < orderIDsLen; i++ {
if orderIDs[i] == order.OrderID {
orderIDs = append(orderIDs[:i], orderIDs[i+1:]...)
c.setOrderIDs(key, orderIDs)
break
}
}
c.closeOrder(order.OrderID)
}
| newDiskCache |
stub.go | // Copyright (c) 2014 The SkyDNS Authors. All rights reserved.
// Use of this source code is governed by The MIT License (MIT) that can be
// found in the LICENSE file.
package server
import (
"net"
"strconv"
"strings"
"github.com/miekg/dns"
"github.com/skynetservices/skydns/msg"
)
const ednsStubCode = dns.EDNS0LOCALSTART + 10
// ednsStub is the EDNS0 record we add to stub queries. Queries which have this record are
// not forwarded again.
var ednsStub = func() *dns.OPT {
o := new(dns.OPT)
o.Hdr.Name = "."
o.Hdr.Rrtype = dns.TypeOPT
e := new(dns.EDNS0_LOCAL)
e.Code = ednsStubCode
e.Data = []byte{1}
o.Option = append(o.Option, e)
return o
}()
// Look in .../dns/stub/<domain>/xx for msg.Services. Loop through them
// extract <domain> and add them as forwarders (ip:port-combos) for
// the stub zones. Only numeric (i.e. IP address) hosts are used.
func (s *server) UpdateStubZones() {
stubmap := make(map[string][]string)
services, err := s.backend.Records("stub.dns."+s.config.Domain, false)
if err != nil {
logf("stub zone update failed: %s", err)
return
}
for _, serv := range services {
if serv.Port == 0 {
serv.Port = 53
}
ip := net.ParseIP(serv.Host)
if ip == nil {
logf("stub zone non-address %s seen for: %s", serv.Key, serv.Host)
continue
}
domain := msg.Domain(serv.Key)
		// Chop off the left-most label, because that is used as the nameserver placeholder
		// and drop the right-most labels that belong to localDomain.
labels := dns.SplitDomainName(domain)
domain = dns.Fqdn(strings.Join(labels[1:len(labels)-dns.CountLabel(s.config.localDomain)], "."))
// If the remaining name equals s.config.LocalDomain we ignore it.
if domain == s.config.localDomain {
logf("not adding stub zone for my own domain")
continue
}
stubmap[domain] = append(stubmap[domain], net.JoinHostPort(serv.Host, strconv.Itoa(serv.Port)))
}
s.config.stub = &stubmap
}
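// For example (names are hypothetical and the local domain is assumed to be
// "skydns.local."): a record under "ns1.example.org.stub.dns.skydns.local."
// with Host "10.0.0.1" and Port 53 would register "10.0.0.1:53" as a
// forwarder for the stub zone "example.org.".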
// ServeDNSStubForward forwards a request to a nameservers and returns the response.
func (s *server) ServeDNSStubForward(w dns.ResponseWriter, req *dns.Msg, ns []string) *dns.Msg {
// Check EDNS0 Stub option, if set drop the packet.
option := req.IsEdns0()
if option != nil {
for _, o := range option.Option {
if o.Option() == ednsStubCode && len(o.(*dns.EDNS0_LOCAL).Data) == 1 &&
o.(*dns.EDNS0_LOCAL).Data[0] == 1 |
}
}
// Add a custom EDNS0 option to the packet, so we can detect loops
// when 2 stubs are forwarding to each other.
if option != nil {
option.Option = append(option.Option, &dns.EDNS0_LOCAL{ednsStubCode, []byte{1}})
} else {
req.Extra = append(req.Extra, ednsStub)
}
var (
r *dns.Msg
err error
)
// Use request Id for "random" nameserver selection.
nsid := int(req.Id) % len(ns)
try := 0
Redo:
if isTCP(w) {
r, err = exchangeWithRetry(s.dnsTCPclient, req, ns[nsid])
} else {
r, err = exchangeWithRetry(s.dnsUDPclient, req, ns[nsid])
}
if err == nil || err == dns.ErrTruncated {
r.Compress = true
r.Id = req.Id
w.WriteMsg(r)
return r
}
	// We have seen an error; this can only mean "server not reached". Try again,
	// but only if we have not exhausted our nameservers.
if try < len(ns) {
try++
nsid = (nsid + 1) % len(ns)
goto Redo
}
logf("failure to forward stub request %q", err)
m := s.ServerFailure(req)
w.WriteMsg(m)
return m
}
| {
// Maybe log source IP here?
logf("not fowarding stub request to another stub")
return nil
} |
parameter_classes_tx.py | """
@author: ksanoo
@updated_at: 12/4/2020
@description: All parameters that are to be swept (specified in sweep_setup files) must have a class definition in
this script. The class name must be the same as the parameter key in loop_param
"""
import global_vars as swp_gbl
from parameter_classes import GenericParam
import device_startup
from abc import abstractmethod
class TestParameter13(GenericParam):
    def set_param(self, key=None, value=None, check_if_value_changed=True): | print('>>> Executing TestParameter13.set_param...')
pass
return value_changed
class TxParameter1(GenericParam):
def set_param(self, key=None, value=None, check_if_value_changed=True):
value_changed = super().check_value_change(key, value, check_if_value_changed)
if value_changed:
# custom set_param function starts here
pass
return value_changed | value_changed = super().check_value_change(key, value, check_if_value_changed)
if value_changed:
# custom set_param function starts here |
interface.py | __author__ = 'Thomas Rueckstiess, [email protected]'
from pybrain.utilities import abstractMethod
from pybrain.structure.modules import Table, Module, TanhLayer, LinearLayer, BiasUnit
from pybrain.structure.connections import FullConnection
from pybrain.structure.networks import FeedForwardNetwork
from pybrain.structure.parametercontainer import ParameterContainer
from pybrain.tools.shortcuts import buildNetwork
from pybrain.utilities import one_to_n
from scipy import argmax, array, r_, asarray, where
from random import choice
class ActionValueInterface(object):
""" Interface for different ActionValue modules, like the
ActionValueTable or the ActionValueNetwork.
"""
numActions = None
def getMaxAction(self, state):
abstractMethod()
def getActionValues(self, state):
abstractMethod()
class ActionValueTable(Table, ActionValueInterface):
""" A special table that is used for Value Estimation methods
in Reinforcement Learning. This table is used for value-based
TD algorithms like Q or SARSA.
"""
def __init__(self, numStates, numActions, name=None):
Module.__init__(self, 1, 1, name)
ParameterContainer.__init__(self, numStates * numActions)
self.numRows = numStates
self.numColumns = numActions
@property
def numActions(self):
return self.numColumns
def _forwardImplementation(self, inbuf, outbuf):
""" Take a vector of length 1 (the state coordinate) and return
the action with the maximum value over all actions for this state.
"""
outbuf[0] = self.getMaxAction(inbuf[0])
def getMaxAction(self, state):
""" Return the action with the maximal value for the given state. """
values = self.params.reshape(self.numRows, self.numColumns)[state, :].flatten()
action = where(values == max(values))[0]
action = choice(action)
return action
def getActionValues(self, state):
return self.params.reshape(self.numRows, self.numColumns)[state, :].flatten()
def initialize(self, value=0.0):
""" Initialize the whole table with the given value. """
self._params[:] = value
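# A minimal usage sketch (dimensions are hypothetical): a 3-state, 2-action
# table, zero-initialized, queried for the greedy action in state 0; ties are
# broken at random via choice().
#
#     table = ActionValueTable(3, 2)
#     table.initialize(0.0)
#     action = table.getMaxAction(0)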
class ActionValueNetwork(Module, ActionValueInterface):
""" A network that approximates action values for continuous state /
discrete action RL environments. To receive the maximum action
for a given state, a forward pass is executed for all discrete
actions, and the maximal action is returned. This network is used
for the NFQ algorithm. """
def __init__(self, dimState, numActions, name=None):
Module.__init__(self, dimState, 1, name)
self.network = buildNetwork(dimState + numActions, dimState + numActions, 1)
self.numActions = numActions
def | (self, inbuf, outbuf):
""" takes the state vector and return the discrete action with
the maximum value over all actions for this state.
"""
outbuf[0] = self.getMaxAction(asarray(inbuf))
def getMaxAction(self, state):
""" Return the action with the maximal value for the given state. """
return argmax(self.getActionValues(state))
def getActionValues(self, state):
""" Run forward activation for each of the actions and returns all values. """
values = array([self.network.activate(r_[state, one_to_n(i, self.numActions)]) for i in range(self.numActions)])
return values
def getValue(self, state, action):
return self.network.activate(r_[state, one_to_n(action, self.numActions)]) | _forwardImplementation |
copypropagation.go | // Copyright 2018 The OPA Authors. All rights reserved.
// Use of this source code is governed by an Apache2
// license that can be found in the LICENSE file.
package copypropagation
import (
"sort"
"github.com/open-policy-agent/opa/ast"
)
// CopyPropagator implements a simple copy propagation optimization to remove
// intermediate variables in partial evaluation results.
//
// For example, given the query: input.x > 1 where 'input' is unknown, the
// compiled query would become input.x = a; a > 1 which would remain in the
// partial evaluation result. The CopyPropagator will remove the variable
// assignment so that partial evaluation simply outputs input.x > 1.
//
// In many cases, copy propagation can remove all variables from the result of
// partial evaluation which simplifies evaluation for non-OPA consumers.
//
// In some cases, copy propagation cannot remove all variables. If the output of
// a built-in call is subsequently used as a ref head, the output variable must
// be kept. For example. sort(input, x); x[0] == 1. In this case, copy
// propagation cannot replace x[0] == 1 with sort(input, x)[0] == 1 as this is
// not legal.
type CopyPropagator struct {
livevars ast.VarSet // vars that must be preserved in the resulting query
sorted []ast.Var // sorted copy of vars to ensure deterministic result
ensureNonEmptyBody bool
}
// New returns a new CopyPropagator that optimizes queries while preserving vars
// in the livevars set.
func New(livevars ast.VarSet) *CopyPropagator {
sorted := make([]ast.Var, 0, len(livevars))
for v := range livevars {
sorted = append(sorted, v)
}
sort.Slice(sorted, func(i, j int) bool {
return sorted[i].Compare(sorted[j]) < 0
})
return &CopyPropagator{livevars: livevars, sorted: sorted}
}
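// A minimal usage sketch, reusing the example from the type comment (an empty
// livevar set, so the intermediate variable can be dropped entirely):
//
//	pe := New(ast.NewVarSet())
//	result := pe.Apply(ast.MustParseBody("input.x = a; a > 1"))
//	// result is now equivalent to: input.x > 1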
// WithEnsureNonEmptyBody configures p to ensure that results are always non-empty.
func (p *CopyPropagator) WithEnsureNonEmptyBody(yes bool) *CopyPropagator {
p.ensureNonEmptyBody = yes
return p
}
// Apply executes the copy propagation optimization and returns a new query.
func (p *CopyPropagator) Apply(query ast.Body) (result ast.Body) {
uf, ok := makeDisjointSets(query)
if !ok {
return query
}
// Compute set of vars that appear in the head of refs in the query. If a var
// is dereferenced, we cannot plug it with a constant value so the constant on
// the union-find root must be unset (e.g., [1][0] is not legal.)
headvars := ast.NewVarSet()
ast.WalkRefs(query, func(x ast.Ref) bool {
if v, ok := x[0].Value.(ast.Var); ok {
if root, ok := uf.Find(v); ok {
root.constant = nil
headvars.Add(root.key)
} else {
headvars.Add(v)
}
}
return false
})
bindings := map[ast.Var]*binding{}
for _, expr := range query {
// Deep copy the expr as it may be mutated below. The caller that is running
// copy propagation may hold references to the expr.
expr = expr.Copy()
pctx := &plugContext{
bindings: bindings,
uf: uf,
negated: expr.Negated,
headvars: headvars,
}
if p.plugBindings(pctx, expr) {
if p.updateBindings(pctx, expr) {
result.Append(expr)
}
}
}
// Run post-processing step on the query to ensure that all live vars are bound
// in the result. The plugging that happens above substitutes all vars in the
// same set with the root.
//
// This step should run before the next step to prevent unnecessary bindings
// from being added to the result. For example:
//
// - Given the following result: <empty>
// - Given the following bindings: x/input.x and y/input
// - Given the following liveset: {x}
//
// If this step were to run AFTER the following step, the output would be:
//
// x = input.x; y = input
//
// Even though y = input is not required.
for _, v := range p.sorted {
if root, ok := uf.Find(v); ok {
if root.constant != nil {
result.Append(ast.Equality.Expr(ast.NewTerm(v), root.constant))
} else if b, ok := bindings[root.key]; ok {
result.Append(ast.Equality.Expr(ast.NewTerm(v), ast.NewTerm(b.v)))
} else if root.key != v {
result.Append(ast.Equality.Expr(ast.NewTerm(v), ast.NewTerm(root.key)))
}
}
}
// Run post-processing step on query to ensure that all killed exprs are
// accounted for. If an expr is killed but the binding is never used, the query
// must still include the expr. For example, given the query 'input.x = a' and
// an empty livevar set, the result must include the ref input.x otherwise the
// query could be satisfied without input.x being defined. When exprs are
// killed we initialize the binding counter to zero and then increment it each
// time the binding is substituted. if the binding was never substituted it
// means the binding value must be added back into the query.
for _, b := range sortbindings(bindings) {
if !b.containedIn(result) {
result.Append(ast.Equality.Expr(ast.NewTerm(b.k), ast.NewTerm(b.v)))
}
}
if p.ensureNonEmptyBody && len(result) == 0 {
result = append(result, ast.NewExpr(ast.BooleanTerm(true)))
}
return result
}
// plugBindings applies the binding list and union-find to x. This process
// removes as many variables as possible.
func (p *CopyPropagator) plugBindings(pctx *plugContext, x interface{}) bool {
// Kill single term expressions that are in the binding list. They will be
// re-added during post-processing if needed.
if expr, ok := x.(*ast.Expr); ok {
if term, ok := expr.Terms.(*ast.Term); ok {
if v, ok := term.Value.(ast.Var); ok {
if root, ok := pctx.uf.Find(v); ok {
if _, ok := pctx.bindings[root.key]; ok {
return false
}
}
}
}
}
ast.WalkTerms(x, func(t *ast.Term) bool {
// Apply union-find to remove redundant variables from input.
switch v := t.Value.(type) {
case ast.Var:
if root, ok := pctx.uf.Find(v); ok {
t.Value = root.Value()
}
case ast.Ref:
if root, ok := pctx.uf.Find(v[0].Value.(ast.Var)); ok {
v[0].Value = root.Value()
}
}
// Apply binding list to substitute remaining vars.
switch v := t.Value.(type) {
case ast.Var:
if b, ok := pctx.bindings[v]; ok {
if !pctx.negated || b.v.IsGround() {
t.Value = b.v
}
return true
}
case ast.Ref:
// Refs require special handling. If the head of the ref was killed, then the
// rest of the ref must be concatenated with the new base.
//
// Invariant: ref heads can only be replaced by refs (not calls).
if b, ok := pctx.bindings[v[0].Value.(ast.Var)]; ok {
if !pctx.negated || b.v.IsGround() {
t.Value = b.v.(ast.Ref).Concat(v[1:])
}
}
for i := 1; i < len(v); i++ {
p.plugBindings(pctx, v[i])
}
return true
}
return false
})
return true
}
// updateBindings returns false if the expression can be killed. If the
// expression is killed, the binding list is updated to map a var to value.
func (p *CopyPropagator) updateBindings(pctx *plugContext, expr *ast.Expr) bool {
if pctx.negated {
return true
}
if expr.IsEquality() {
a, b := expr.Operand(0), expr.Operand(1)
if a.Equal(b) {
return false
}
k, v, keep := p.updateBindingsEq(a, b)
if !keep {
if v != nil {
pctx.bindings[k] = newbinding(k, v)
}
return false
}
} else if expr.IsCall() {
terms := expr.Terms.([]*ast.Term)
output := terms[len(terms)-1]
if k, ok := output.Value.(ast.Var); ok && !p.livevars.Contains(k) && !pctx.headvars.Contains(k) {
pctx.bindings[k] = newbinding(k, ast.CallTerm(terms[:len(terms)-1]...).Value)
return false
}
}
return !isNoop(expr)
}
func (p *CopyPropagator) updateBindingsEq(a, b *ast.Term) (ast.Var, ast.Value, bool) {
k, v, keep := p.updateBindingsEqAsymmetric(a, b)
if !keep {
return k, v, keep
}
return p.updateBindingsEqAsymmetric(b, a)
}
func (p *CopyPropagator) updateBindingsEqAsymmetric(a, b *ast.Term) (ast.Var, ast.Value, bool) {
k, ok := a.Value.(ast.Var)
if !ok || p.livevars.Contains(k) {
return "", nil, true
}
switch b.Value.(type) {
case ast.Ref, ast.Call:
return k, b.Value, false
}
return "", nil, true
}
type plugContext struct {
bindings map[ast.Var]*binding
uf *unionFind
headvars ast.VarSet
negated bool
}
type binding struct {
k ast.Var
v ast.Value
}
func newbinding(k ast.Var, v ast.Value) *binding {
return &binding{k: k, v: v}
}
func (b *binding) containedIn(query ast.Body) bool {
var stop bool
switch v := b.v.(type) {
case ast.Ref:
ast.WalkRefs(query, func(other ast.Ref) bool {
if stop || other.HasPrefix(v) {
stop = true
return stop
}
return false
})
default:
ast.WalkTerms(query, func(other *ast.Term) bool {
if stop || other.Value.Compare(v) == 0 {
stop = true
return stop
}
return false
})
}
return stop
}
func sortbindings(bindings map[ast.Var]*binding) []*binding {
sorted := make([]*binding, 0, len(bindings))
for _, b := range bindings {
sorted = append(sorted, b)
}
sort.Slice(sorted, func(i, j int) bool {
return sorted[i].k.Compare(sorted[j].k) < 0
})
return sorted
}
type unionFind struct {
roots map[ast.Var]*unionFindRoot
parents map[ast.Var]ast.Var
}
// makeDisjointSets builds the union-find structure for the query. The structure
// is built by processing all of the equality exprs in the query. Sets represent
// vars that must be equal to each other. In addition to vars, each set can have
// at most one constant. If the query contains expressions that cannot be
// satisfied (e.g., because a set has multiple constants) this function returns
// false.
func makeDisjointSets(query ast.Body) (*unionFind, bool) {
uf := newUnionFind()
for _, expr := range query {
if expr.IsEquality() {
a, b := expr.Operand(0), expr.Operand(1)
varA, ok1 := a.Value.(ast.Var)
varB, ok2 := b.Value.(ast.Var)
if ok1 && ok2 {
if _, ok := uf.Merge(varA, varB); !ok {
return nil, false
}
} else if ok1 && ast.IsConstant(b.Value) {
root := uf.MakeSet(varA)
if root.constant != nil && !root.constant.Equal(b) {
return nil, false
}
root.constant = b
} else if ok2 && ast.IsConstant(a.Value) {
root := uf.MakeSet(varB)
if root.constant != nil && !root.constant.Equal(a) {
return nil, false
}
root.constant = a
}
}
}
return uf, true
}
func newUnionFind() *unionFind {
return &unionFind{
roots: map[ast.Var]*unionFindRoot{},
parents: map[ast.Var]ast.Var{},
}
}
func (uf *unionFind) MakeSet(v ast.Var) *unionFindRoot {
root, ok := uf.Find(v)
if ok {
return root
}
root = newUnionFindRoot(v)
uf.parents[v] = v
uf.roots[v] = root
return uf.roots[v]
}
func (uf *unionFind) Find(v ast.Var) (*unionFindRoot, bool) {
parent, ok := uf.parents[v]
if !ok |
if parent == v {
return uf.roots[v], true
}
return uf.Find(parent)
}
func (uf *unionFind) Merge(a, b ast.Var) (*unionFindRoot, bool) {
r1 := uf.MakeSet(a)
r2 := uf.MakeSet(b)
if r1 != r2 {
uf.parents[r1.key] = r2.key
delete(uf.roots, r1.key)
// Sets can have at most one constant value associated with them. When
// unioning, we must preserve this invariant. If a set has two constants,
// there will be no way to prove the query.
if r1.constant != nil && r2.constant != nil && !r1.constant.Equal(r2.constant) {
return nil, false
} else if r2.constant == nil {
r2.constant = r1.constant
}
}
return r2, true
}
type unionFindRoot struct {
key ast.Var
constant *ast.Term
}
func newUnionFindRoot(key ast.Var) *unionFindRoot {
return &unionFindRoot{
key: key,
}
}
func (r *unionFindRoot) Value() ast.Value {
if r.constant != nil {
return r.constant.Value
}
return r.key
}
func isNoop(expr *ast.Expr) bool {
if !expr.IsCall() {
term := expr.Terms.(*ast.Term)
if !ast.IsConstant(term.Value) {
return false
}
return !ast.Boolean(false).Equal(term.Value)
}
// A==A can be ignored
if expr.Operator().Equal(ast.Equal.Ref()) {
return expr.Operand(0).Equal(expr.Operand(1))
}
return false
}
| {
return nil, false
} |
url.go | package astiurl
import (
"net/url"
"path/filepath"
"github.com/pkg/errors"
)
// Parse parses an URL (files included)
func Parse(i string) (o *url.URL, err error) | {
// Basic parse
if o, err = url.Parse(i); err != nil {
err = errors.Wrapf(err, "basic parsing of url %s failed", i)
return
}
// File
if o.Scheme == "" {
// Get absolute path
if i, err = filepath.Abs(i); err != nil {
err = errors.Wrapf(err, "getting absolute path of %s failed", i)
return
}
// Set url
o = &url.URL{Path: filepath.ToSlash(i), Scheme: "file"}
}
return
} |
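// Usage sketch: Parse("https://example.com/x") is returned as-is, while a
// relative path such as Parse("testdata/a.mp4") (hypothetical) yields
// something like file:///abs/working/dir/testdata/a.mp4.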
|
crawl_weibo_data.go | package main
import (
"flag"
"fmt"
"github.com/huichen/gobo"
"github.com/huichen/gobo/contrib"
"io/ioutil"
"log"
"os"
"strings"
"time"
)
var (
access_token = flag.String("access_token", "", "用户的访问令牌")
weibo = gobo.Weibo{}
users_file = flag.String("users_file", "users.txt", "从该文件读入要下载的微博用户名,每个名字一行")
output_file = flag.String("output_file", "weibo_data.txt", "将抓取的微博写入下面的文件")
num_weibos = flag.Int("num_weibos", 2000, "从每个微博账号中抓取多少条微博")
)
func main() {
flag.Parse()
| log.Fatal("无法读取-users_file")
}
users := strings.Split(string(content), "\n")
outputFile, _ := os.Create(*output_file)
defer outputFile.Close()
// 抓微博
for _, user := range users {
if user == "" {
continue
}
log.Printf("抓取 @%s 的微博", user)
statuses, err := contrib.GetStatuses(
&weibo, *access_token, user, 0, *num_weibos, 5000) // 超时5秒
if err != nil {
log.Print(err)
continue
}
for _, status := range statuses {
t, _ := time.Parse("Mon Jan 2 15:04:05 -0700 2006", status.Created_At)
outputFile.WriteString(fmt.Sprintf(
"%d||||%d||||%d||||%s||||%d||||%d||||%d||||%s||||%s||||%s\n",
status.Id, uint32(t.Unix()), status.User.Id, status.User.Screen_Name,
status.Reposts_Count, status.Comments_Count, status.Attitudes_Count,
status.Thumbnail_Pic, status.Original_Pic, status.Text))
}
}
} | // 读取用户名
content, err := ioutil.ReadFile(*users_file)
if err != nil { |
error.rs | use std::fmt;
use modifier::Modifier;
use Response;
pub use hyper::error::Result as HttpResult;
pub use hyper::Error as HttpError;
pub use std::error::Error;
/// The type of Errors inside and when using Iron.
///
/// `IronError` informs its receivers of two things:
///
/// * What went wrong
/// * What to do about it
///
/// The `error` field is responsible for informing receivers of which
/// error occured, and receivers may also modify the error field by layering
/// it (building up a cause chain).
///
/// The `response` field provides a tangible action to be taken if this error
/// is not otherwise handled.
#[derive(Debug)]
pub struct IronError {
/// The underlying error
///
/// This can be layered and will be logged at the end of an errored
/// request.
pub error: Box<Error + Send>,
/// What to do about this error.
///
/// This Response will be used when the error-handling flow finishes.
pub response: Response,
}
impl IronError {
/// Create a new `IronError` from an error and a modifier.
pub fn new<E: 'static + Error + Send, M: Modifier<Response>>(e: E, m: M) -> IronError {
IronError {
error: Box::new(e),
response: Response::with(m),
}
}
}
impl fmt::Display for IronError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt::Display::fmt(&*self.error, f)
}
}
impl Error for IronError {
fn description(&self) -> &str |
fn cause(&self) -> Option<&Error> {
self.error.cause()
}
}
| {
self.error.description()
} |
dataset.py | # tests.dataset
# Helper functions for tests that utilize downloadable datasets.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Thu Oct 13 19:55:53 2016 -0400
#
# Copyright (C) 2016 District Data Labs
# For license information, see LICENSE.txt
#
# ID: dataset.py [8f4de77] [email protected] $
"""
Helper functions for tests that utilize downloadable datasets.
"""
##########################################################################
## Imports
##########################################################################
import os
import shutil
import hashlib
import zipfile
import numpy as np
from sklearn.datasets.base import Bunch
try:
import requests
except ImportError:
requests = None
##########################################################################
## Fixtures
##########################################################################
DATASETS = {
'concrete': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/concrete.zip',
'signature': 'b9ea5f26a7bb272a040e2f1a993b26babbf8dc4a04ab8198bb315ca66d71f10d',
'type': 'numpy',
},
'energy': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/energy.zip',
'signature': '19fb86f3bcdde208eed46944172cb643ef6a7d58da103fb568fae43205ed89d3',
'type': 'numpy',
},
'credit': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/credit.zip',
'signature': '4a91339c69f55e18f3f48004328fbcb7868070b618208fed099920427b084e5e',
'type': 'numpy',
},
'occupancy': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/occupancy.zip',
'signature': '429cfe376dc9929a1fa528da89f0e1626e34e19695f3f555d8954025bbc522b8',
'type': 'numpy',
},
'mushroom': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/mushroom.zip',
'signature': '884c43cb70db35d211c67b1cf6a3683b2b4569393d2789d5c07840da4dc85ba8',
'type': 'numpy',
},
'hobbies': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/hobbies.zip',
'signature': '415c8f68df1486d5d84a1d1757a5aa3035aef5ad63ede5013c261d622fbd29d8',
'type': 'corpus',
},
'game': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/game.zip',
'signature': 'b1bd85789a014a898daa34cb5f89ceab6d2cd6488a2e572187e34aa4ec21a43b',
'type': 'numpy',
},
'bikeshare': {
'url': 'https://s3.amazonaws.com/ddl-data-lake/yellowbrick/bikeshare.zip',
'signature': 'a9b440f65549746dff680c92ff8bdca3c7265f09db1cf09e708e6e26fc8aba44',
'type': 'numpy',
},
}
FIXTURES = os.path.join(os.path.dirname(__file__), "fixtures")
##########################################################################
## Test Cases that Require Download
##########################################################################
class DatasetMixin(object):
"""
Mixin for unittest.TestCase class to download datasets from S3 for
testing real world machine learning visual diagnostics.
"""
@staticmethod
def sha256sum(path, blocksize=65536):
"""
Computes the SHA256 signature of a file to verify that the file has not
been modified in transit and that it is the correct version of the data.
"""
sig = hashlib.sha256()
with open(path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
sig.update(buf)
buf = f.read(blocksize)
return sig.hexdigest()
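    # Sketch (hypothetical path): DatasetMixin.sha256sum("fixtures/concrete.zip")
    # returns a hex digest that download_data() compares against the hardcoded
    # DATASETS[...]["signature"] before trusting the archive.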
@staticmethod
def download_data(url, path=FIXTURES, signature=None, extract=True):
"""
Downloads the zipped data set specified at the given URL, saving it to
the output path specified. This function verifies the download with the
given signature (if supplied) and extracts the zip file if requested.
"""
if requests is None:
raise ImportError(
"The requests module is required to download data --\n"
"please install it with pip install requests."
)
# Create the output directory if it does not exist
if not os.path.exists(path):
os.mkdir(path)
# Get the name of the file from the URL
name = os.path.basename(url)
dlpath = os.path.join(path, name)
# Fetch the response in a streaming fashion and write it to disk.
response = requests.get(url, stream=True)
with open(dlpath, 'wb') as f:
for chunk in response.iter_content(65536):
f.write(chunk)
# If verify, compare the signature
if signature is not None:
dlsignature = DatasetMixin.sha256sum(dlpath)
if signature != dlsignature:
raise ValueError(
"Download signature does not match hardcoded signature!"
)
# If extract, extract the zipfile.
if extract:
zf = zipfile.ZipFile(dlpath)
zf.extractall(path)
@staticmethod
def download_all(path=FIXTURES, verify=True, extract=True):
"""
Downloads all the example datasets. If verify is True then compare the
download signature with the hardcoded signature. If extract is True then
extract the contents of the zipfile to the given path.
"""
for name, meta in DATASETS.items():
url = meta['url']
signature = meta['signature'] if verify else None
DatasetMixin.download_data(
url, path=path, signature=signature, extract=extract
)
@staticmethod
def remove_all(fixtures=FIXTURES):
"""
Removes all the downloaded datasets as clean up
"""
shutil.rmtree(fixtures)
@staticmethod
def load_data(name, fixtures=FIXTURES):
"""
        Loads the numpy matrix from the specified dataset, downloading it
        first if it hasn't already been downloaded.
"""
        # If this is a corpus dataset, load it via load_corpus instead.
if DATASETS[name]['type'] == 'corpus':
            return DatasetMixin.load_corpus(name, fixtures)
path = os.path.join(fixtures, name, "{}.csv".format(name))
if not os.path.exists(path):
DatasetMixin.download_all(path=fixtures)
return np.genfromtxt(path, dtype=float, delimiter=',', names=True)
@staticmethod
def load_corpus(name, fixtures=FIXTURES):
"""
Loads a sklearn Bunch with the corpus and downloads it if it hasn't
already been downloaded. Used to test text visualizers.
"""
path = os.path.join(fixtures, name)
if not os.path.exists(path):
DatasetMixin.download_all(path=fixtures)
# Read the directories in the directory as the categories.
categories = [
cat for cat in os.listdir(path)
if os.path.isdir(os.path.join(path, cat))
]
files = [] # holds the file names relative to the root
data = [] # holds the text read from the file
target = [] # holds the string of the category
# Load the data from the files in the corpus
for cat in categories:
            # Use a distinct loop variable so the corpus `name` argument isn't shadowed
            for fname in os.listdir(os.path.join(path, cat)):
                files.append(os.path.join(path, cat, fname))
                target.append(cat)
                with open(os.path.join(path, cat, fname), 'r') as f:
                    data.append(f.read())
# Return the data bunch for use similar to the newsgroups example
return Bunch(
categories=categories,
files=files,
data=data,
target=target,
)
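# A minimal usage sketch, not part of the original test suite: download one
# dataset, verify it against the hardcoded signature, then load it as a
# structured numpy array. The 'occupancy' key and the FIXTURES path come from
# the DATASETS table above; the module-level imports (os, hashlib, zipfile,
# shutil, numpy as np, requests, Bunch) are assumed to exist in the elided
# header of this file.
if __name__ == "__main__":
    meta = DATASETS["occupancy"]
    DatasetMixin.download_data(
        meta["url"], path=FIXTURES, signature=meta["signature"], extract=True
    )
    data = DatasetMixin.load_data("occupancy")
    print(data.dtype.names)      # column names parsed from the CSV header
    print(len(data), "rows loaded")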
fscanf.py
from angr.procedures.stubs.format_parser import FormatParser
from cle.backends.externs.simdata.io_file import io_file_data_for_arch
class fscanf(FormatParser):
#pylint:disable=arguments-differ
def run(self, file_ptr):
# TODO handle errors
        fd_offset = io_file_data_for_arch(self.state.arch)['fd']
fd = self.state.mem[file_ptr + fd_offset:].int.resolved
simfd = self.state.posix.get_fd(fd)
if simfd is None:
return -1
fmt_str = self._parse(1)
items = fmt_str.interpret(2, self.arg, simfd=simfd)
        return items
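# Usage sketch (an assumption, not part of the original file): a
# FormatParser-based SimProcedure like the one above is typically hooked over
# the target binary's fscanf so reads go through angr's SimFile layer.
# './target' is a hypothetical binary path.
if __name__ == "__main__":
    import angr
    proj = angr.Project("./target", auto_load_libs=False)
    proj.hook_symbol("fscanf", fscanf())   # route fscanf calls to this procedure
    simgr = proj.factory.simulation_manager(proj.factory.entry_state())
    simgr.run()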
activity.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { environment } from '../../../environments/environment';
@Injectable()
export class ActivityService {
  APP_ID: string
  MASTER_KEY: string
  SERVER_URL: string
  objectId: any
constructor(
private http:HttpClient
) {
this.APP_ID = environment.APP_ID;
this.MASTER_KEY = environment.MASTER_KEY;
this.SERVER_URL = environment.apiUrl+'/classes/activity'
}
saveData(frm : any){
if(frm.objectId==null){
return this.http.post(this.SERVER_URL,frm,{
headers:new HttpHeaders({
'Content-Type':'application/json',
'X-Parse-Application-Id':this.APP_ID,
'X-Parse-REST-API-Key':this.MASTER_KEY,
})
})
}
else{
this.SERVER_URL = environment.apiUrl+'/classes/activity/'+frm.objectId
return this.http.put(this.SERVER_URL,frm,{
headers:new HttpHeaders({
'Content-Type':'application/json',
'X-Parse-Application-Id':this.APP_ID,
'X-Parse-REST-API-Key':this.MASTER_KEY,
})
})
}
}
  displayActivity(){
    // saveData()/deleteData() may have left an objectId appended to SERVER_URL,
    // so rebuild the collection URL before listing.
    this.SERVER_URL = environment.apiUrl+'/classes/activity'
    return this.http.get(this.SERVER_URL,{
headers:new HttpHeaders({
'Content-Type':'application/json',
'X-Parse-Application-Id':this.APP_ID,
'X-Parse-REST-API-Key':this.MASTER_KEY,
})
})
}
deleteData(frm:any){
this.SERVER_URL = environment.apiUrl+'/classes/activity/'+frm.objectId
return this.http.delete(this.SERVER_URL,{
headers:new HttpHeaders({
'Content-Type':'application/json',
'X-Parse-Application-Id':this.APP_ID,
'X-Parse-REST-API-Key':this.MASTER_KEY,
})
})
}
}
index.tsx
import jsonViewStyles from "@/components/JsonView/index.less";
import { CaretDownOutlined, CaretRightOutlined } from "@ant-design/icons";
import { useEffect, useState } from "react";
import JsonValue from "@/components/JsonView/JsonValue";
import classNames from "classnames";
import JsonArray from "@/components/JsonView/JsonArray";
/**
 * Renders the given data as a formatted, collapsible JSON view
 * @param data the object to display
 * @constructor
 */
type JsonDataProps = {
data: object;
} & _CommonProps;
const JsonData = ({ data, ...restProps }: JsonDataProps) => {
const [isShowJson, setIsShowJson] = useState<boolean>(false);
const { secondaryIndexKeys, onClickValue } = restProps;
const renderStack: string[] = [];
const indentStyle = {
paddingLeft: "20px",
};
useEffect(() => {
return () => {
setIsShowJson(false);
};
}, []);
  /**
   * Renders a key/value pair according to the value's type
   * @param key
   * @param val
   */
const handleValueTypes = (key: string, val: any) => {
const isIndex = () => {
if (!secondaryIndexKeys || secondaryIndexKeys?.length <= 0) return false;
return !!secondaryIndexKeys.find((item) => item.keyItem === key);
};
let indexKey = "";
if (isIndex()) {
const currentSecondaryIndex = secondaryIndexKeys?.find(
(item) => item.keyItem === key
);
indexKey = `${currentSecondaryIndex.parentKey}.${currentSecondaryIndex.keyItem}`;
}
return (
<>
<span
className={classNames(
jsonViewStyles.jsonViewKey,
isIndex() && jsonViewStyles.jsonIndexViewKey
)}
>
"{key}"
</span>
:
<JsonValue
jsonKey={key}
val={val}
{...restProps}
onClickValue={
isIndex()
              ? () =>
                  onClickValue?.(val, {
                    indexKey,
                    isIndex: isIndex(),
})
: onClickValue
}
/>
</>
);
};
if (!data) return <div style={indentStyle} />;
if (data instanceof Array) return <JsonArray data={data} />;
let keys = Object.keys(data);
let kvList: JSX.Element[] = [];
keys.forEach((k, idx) => {
renderStack.push(k);
let v = Reflect.get(data, k);
let isLastEle = idx >= keys.length - 1;
let dom = handleValueTypes(k, v);
kvList.push(
<div key={idx}>
{dom}
{!isLastEle ? "," : ""}
</div>
);
renderStack.pop();
});
if (renderStack.length > 0) {
return <div style={indentStyle}>{kvList}</div>;
}
return (
<div className={classNames(jsonViewStyles.jsonView)}>
{kvList.length > 0 &&
(isShowJson ? (
<div className={classNames(jsonViewStyles.jsonViewIcon)}>
<CaretDownOutlined
onClick={() => setIsShowJson(() => !isShowJson)}
/>
</div>
) : (
<div className={classNames(jsonViewStyles.jsonViewIcon)}>
<CaretRightOutlined
onClick={() => setIsShowJson(() => !isShowJson)}
/>
</div>
))}
<span>{</span>
{isShowJson && kvList.length > 0 && (
<div style={indentStyle}>{kvList}</div>
)}
<span>}</span>
</div>
);
};
export default JsonData; | onClickValue?.(val, {
indexKey, |
eggWhites.js | // eggWhites.js
//
// Copyright (c) 2016 Stefan Wirth
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
(function() {
'use strict';
angular.module('eggWhites',[])
.constant('KHALED_IPSUM_API', 'https://eggwhites.herokuapp.com/khaledIpsum')
.directive('khaledIpsum', ['khaledService', function(khaledService) {
return {
restrict: 'E',
link: function(scope, element, attributes) {
var numberOfEggWhites = attributes.paragraphs || 5;
khaledService.paveTheRoadToSuccessWith(numberOfEggWhites)
.then(function(eggWhites) {
eggWhites.forEach(function(eggWhite) {
var p = angular.element('<p class="khaledIpsum"></p>');
p.html(eggWhite);
element.append(p);
});
});
}
};
}])
.factory('khaledService', ['$http', '$q', 'KHALED_IPSUM_API', function($http, $q, KHALED_IPSUM_API) {
var service = {
paveTheRoadToSuccessWith: paveTheRoadToSuccessWith
};
return service;
        function paveTheRoadToSuccessWith(numberOfEggWhites) {
return $http.get(KHALED_IPSUM_API, {
params: {
paragraphs: numberOfEggWhites
}
})
.then(function(khaledIpsum) {
var roadToSuccess = khaledIpsum.data.roadToSuccess;
var paragraphs = roadToSuccess.split('|');
return $q.when(paragraphs);
});
}
}]);
})();
organization-detail-organization-detail-module.js
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([["organization-detail-organization-detail-module"],{
/***/ "./src/app/layout/organization-detail/organization-detail-routing.module.ts":
/*!**********************************************************************************!*\
!*** ./src/app/layout/organization-detail/organization-detail-routing.module.ts ***!
\**********************************************************************************/
/*! exports provided: OrganizationDetailRoutingModule */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "OrganizationDetailRoutingModule", function() { return OrganizationDetailRoutingModule; });
/* harmony import */ var _angular_core__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @angular/core */ "./node_modules/@angular/core/fesm5/core.js");
/* harmony import */ var _angular_router__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @angular/router */ "./node_modules/@angular/router/fesm5/router.js");
/* harmony import */ var _organization_detail_component__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./organization-detail.component */ "./src/app/layout/organization-detail/organization-detail.component.ts");
var __decorate = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var routes = [
{ path: '', component: _organization_detail_component__WEBPACK_IMPORTED_MODULE_2__["OrganizationDetailComponent"] },
{ path: 'organization-detail', component: _organization_detail_component__WEBPACK_IMPORTED_MODULE_2__["OrganizationDetailComponent"] }
];
var OrganizationDetailRoutingModule = /** @class */ (function () {
function OrganizationDetailRoutingModule() {
}
OrganizationDetailRoutingModule = __decorate([
Object(_angular_core__WEBPACK_IMPORTED_MODULE_0__["NgModule"])({
imports: [_angular_router__WEBPACK_IMPORTED_MODULE_1__["RouterModule"].forChild(routes)],
exports: [_angular_router__WEBPACK_IMPORTED_MODULE_1__["RouterModule"]]
})
], OrganizationDetailRoutingModule);
return OrganizationDetailRoutingModule;
}());
/***/ }),
/***/ "./src/app/layout/organization-detail/organization-detail.component.html":
/*!*******************************************************************************!*\
!*** ./src/app/layout/organization-detail/organization-detail.component.html ***!
\*******************************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
module.exports = "<h2>Client Detail</h2>\n<div *ngIf=\"organizationId > 0 \">\n <button onclick=\"window.history.back();\" class=\"btn btn-secondary btn-space float-right\">Back</button>\n <div class=\"float-left\">\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToGroups()\" >Groups</button>-->\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToCategories()\" >Categories</button>-->\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToSubCategories()\" >Sub Categories</button>-->\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToUnits()\" >Unit</button>-->\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToItems()\" >Items</button>-->\n <button class=\"btn btn-primary btn-space\" (click)=\"goToAddresses()\" >Addresses</button>\n <!--<button class=\"btn btn-primary btn-space\" (click)=\"goToClients()\" >Clients</button>-->\n </div>\n</div>\n<div class=\"clearfix\"></div>\n<form [formGroup]=\"registerForm\" (ngSubmit)=\"onSubmit(registerForm.value)\">\n\n <h5> Basic detail</h5>\n <hr>\n <div class=\"form-group\">\n <label>Client Name</label>\n <input type=\"text\" formControlName=\"OrganizationName\" class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.OrganizationName.errors }\"/>\n <div *ngIf=\"submitted && f.OrganizationName.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.OrganizationName.errors.required\">Client name is required</div>\n </div>\n </div>\n <div class=\"form-group\">\n <label>First Name</label>\n <input type=\"text\" formControlName=\"FirstName\" class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.FirstName.errors }\"/>\n <div *ngIf=\"submitted && f.FirstName.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.FirstName.errors.required\">First Name is required</div>\n </div>\n </div>\n\n <div class=\"form-group\">\n <label>Last Name</label>\n <input type=\"text\" formControlName=\"LastName\" class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.LastName.errors }\"/>\n <div *ngIf=\"submitted && f.LastName.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.LastName.errors.required\">Last Name is required</div>\n </div>\n </div>\n <div class=\"form-group\" *ngIf=\"organizationId == 0\" >\n <label>User Name(email)</label>\n <input type=\"text\" formControlName=\"UserName\" class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.UserName.errors }\" />\n\n <div *ngIf=\"registerForm.get('UserName').hasError('isEmailUnique')\" class=\"error\">\n <div>This email has been registered already</div>\n </div>\n\n <div *ngIf=\"submitted && f.UserName.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.UserName.errors.required\">Username is required</div>\n <div *ngIf=\"f.UserName.errors?.email\">\n Email not valid.\n </div>\n </div>\n <input type=\"hidden\" formControlName=\"UserId\" class=\"form-control\" />\n </div>\n <div *ngIf=\"organizationId == 0 \">\n <div class=\"form-group\">\n <label>Password</label>\n <input type=\"password\" formControlName=\"Password\" class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.Password.errors }\"/>\n <div *ngIf=\"submitted && f.Password.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.Password.errors.required\">Password is required</div>\n <div *ngIf=\"f.Password.errors.minlength\">Password must be at least 6 characters</div>\n </div>\n </div>\n <div class=\"form-group\">\n <label> Confirm Password</label>\n <input type=\"password\" formControlName=\"ConfirmPassword\" class=\"form-control\"\n 
[ngClass]=\"{ 'is-invalid': submitted && f.ConfirmPassword.errors }\"/>\n <div *ngIf=\"submitted && f.ConfirmPassword.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.ConfirmPassword.errors.required\">Confirm password is required</div>\n <div *ngIf=\"f.ConfirmPassword.errors.minlength\">Password must be at least 6 characters</div>\n </div>\n <span *ngIf=\"registerForm.get('ConfirmPassword').hasError('isConfirmPasswordMatch')\" class=\"error\">\n Confirm password doesn't match\n </span>\n\n </div>\n </div>\n <!--<h5> Role detail</h5>-->\n <!--<hr>-->\n <!--<div class=\"row\">-->\n <!--<div class=\"col-md-12\">-->\n <!--<div class=\"form-group\">-->\n <!--<label> User Role</label>-->\n <!--<select class=\"form-control\" formControlName='UserRoleId' class=\"form-control\"-->\n\n <!--[ngClass]=\"{ 'is-invalid': submitted && f.UserRoleId.errors }\">-->\n <!--<option value=\"\">Please select</option>-->\n <!--<option *ngFor=\"let role of rolesModel\" [ngValue]=\"role.LTagId\">{{role.TagDescr}}</option>-->\n <!--</select>-->\n <!--<div *ngIf=\"submitted && f.UserRoleId.errors\" class=\"invalid-feedback\">-->\n <!--<div *ngIf=\"f.UserRoleId.errors.required\">Role is required</div>-->\n <!--</div>-->\n <!--</div>-->\n <!--</div>-->\n <!--</div>-->\n <h5> Phone</h5>\n <hr>\n <div class=\"row\">\n <div class=\"col-md-6\">\n <div class=\"form-group\">\n <label> Phone 1 </label>\n <input type=\"text\" class=\"form-control\" formControlName='Phone' class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.Phone.errors }\"/>\n <div *ngIf=\"submitted && f.Phone.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.Phone.errors.required\">phone 1 is required</div>\n </div>\n </div>\n </div>\n <div class=\"col-md-6\">\n <div class=\"form-group\">\n <label> Phone 2</label>\n <input class=\"form-control\" formControlName='Phone2' class=\"form-control\"\n [ngClass]=\"{ 'is-invalid': submitted && f.Phone2.errors }\"/>\n <div *ngIf=\"submitted && f.Phone2.errors\" class=\"invalid-feedback\">\n <div *ngIf=\"f.Phone2.errors?.minlength\">Invalid Phone, Phone must be minimum 12 character long. example- 111-111-1111 </div>\n </div>\n </div>\n </div>\n </div>\n <div class=\"form-group\">\n <button [disabled]=\"loading\" class=\"btn btn-primary\">{{btnName}}</button>\n <img *ngIf=\"loading\"\n src=\"data:image/gif;base64,R0lGODlhEAAQAPIAAP///wAAAMLCwkJCQgAAAGJiYoKCgpKSkiH/C05FVFNDQVBFMi4wAwEAAAAh/hpDcmVhdGVkIHdpdGggYWpheGxvYWQuaW5mbwAh+QQJCgAAACwAAAAAEAAQAAADMwi63P4wyklrE2MIOggZnAdOmGYJRbExwroUmcG2LmDEwnHQLVsYOd2mBzkYDAdKa+dIAAAh+QQJCgAAACwAAAAAEAAQAAADNAi63P5OjCEgG4QMu7DmikRxQlFUYDEZIGBMRVsaqHwctXXf7WEYB4Ag1xjihkMZsiUkKhIAIfkECQoAAAAsAAAAABAAEAAAAzYIujIjK8pByJDMlFYvBoVjHA70GU7xSUJhmKtwHPAKzLO9HMaoKwJZ7Rf8AYPDDzKpZBqfvwQAIfkECQoAAAAsAAAAABAAEAAAAzMIumIlK8oyhpHsnFZfhYumCYUhDAQxRIdhHBGqRoKw0R8DYlJd8z0fMDgsGo/IpHI5TAAAIfkECQoAAAAsAAAAABAAEAAAAzIIunInK0rnZBTwGPNMgQwmdsNgXGJUlIWEuR5oWUIpz8pAEAMe6TwfwyYsGo/IpFKSAAAh+QQJCgAAACwAAAAAEAAQAAADMwi6IMKQORfjdOe82p4wGccc4CEuQradylesojEMBgsUc2G7sDX3lQGBMLAJibufbSlKAAAh+QQJCgAAACwAAAAAEAAQAAADMgi63P7wCRHZnFVdmgHu2nFwlWCI3WGc3TSWhUFGxTAUkGCbtgENBMJAEJsxgMLWzpEAACH5BAkKAAAALAAAAAAQABAAAAMyCLrc/jDKSatlQtScKdceCAjDII7HcQ4EMTCpyrCuUBjCYRgHVtqlAiB1YhiCnlsRkAAAOwAAAAAAAAAAAA==\"/>\n <a (click)=\"goToManageOrganization()\" class=\"btn btn-link\">Cancel</a>\n </div>\n\n</form>\n\n\n"
/***/ }),
/***/ "./src/app/layout/organization-detail/organization-detail.component.scss":
/*!*******************************************************************************!*\
!*** ./src/app/layout/organization-detail/organization-detail.component.scss ***!
\*******************************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
module.exports = "h5 {\n color: #00b3ee; }\n\n.error {\n color: red; }\n\n.btn-space {\n margin-right: 5px; }\n"
/***/ }),
/***/ "./src/app/layout/organization-detail/organization-detail.component.ts":
/*!*****************************************************************************!*\
!*** ./src/app/layout/organization-detail/organization-detail.component.ts ***!
\*****************************************************************************/
/*! exports provided: OrganizationDetailComponent */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "OrganizationDetailComponent", function() { return OrganizationDetailComponent; });
/* harmony import */ var _angular_core__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @angular/core */ "./node_modules/@angular/core/fesm5/core.js");
/* harmony import */ var _angular_router__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @angular/router */ "./node_modules/@angular/router/fesm5/router.js");
/* harmony import */ var _angular_forms__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @angular/forms */ "./node_modules/@angular/forms/fesm5/forms.js");
/* harmony import */ var _services_alert_service__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../services/alert.service */ "./src/app/services/alert.service.ts");
/* harmony import */ var _services_tag_service__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../../services/tag.service */ "./src/app/services/tag.service.ts");
/* harmony import */ var _services_organizations_service__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../../services/organizations.service */ "./src/app/services/organizations.service.ts");
var __decorate = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (undefined && undefined.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var OrganizationDetailComponent = /** @class */ (function () {
function OrganizationDetailComponent(formBuilder, router, alertService, httpTagsService, organizationService, route) {
this.formBuilder = formBuilder;
this.router = router;
this.alertService = alertService;
this.httpTagsService = httpTagsService;
this.organizationService = organizationService;
this.route = route;
this.btnName = 'Register';
this.loading = false;
this.submitted = false;
}
OrganizationDetailComponent.prototype.ngOnInit = function () {
this.userId = Number(localStorage.getItem('userId'));
this.affiliateId = Number(localStorage.getItem('affiliateId'));
this.organizationId = Number(localStorage.getItem('organizationId'));
this.buildForm();
if (this.organizationId > 0) {
this.btnName = 'Update';
this.getUserDetail(this.userId);
}
else {
}
this.getRoleList();
};
OrganizationDetailComponent.prototype.buildForm = function () {
if (this.organizationId > 0) {
this.registerForm = this.formBuilder.group({
UserId: [0],
FirstName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(30)]],
LastName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(80)]],
// UserName: ['', [Validators.required, Validators.email, Validators.maxLength(75)], this.isEmailUnique.bind(this)],
// UserRoleId: ['', Validators.required],
OrganizationName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(100)]],
Phone: ['', _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required],
Phone2: ['', _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].minLength(12)],
});
}
else {
this.registerForm = this.formBuilder.group({
UserId: [0],
FirstName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(30)]],
LastName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(80)]],
UserName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].email, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(75)], this.isEmailUnique.bind(this)],
// UserRoleId: ['', Validators.required],
OrganizationName: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].maxLength(100)]],
Phone: ['', _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required],
Phone2: ['', _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].minLength(12)],
ConfirmPassword: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].minLength(3)], this.pwdMatchValidator.bind(this)],
Password: ['', [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].required, _angular_forms__WEBPACK_IMPORTED_MODULE_2__["Validators"].minLength(3)]],
});
}
};
Object.defineProperty(OrganizationDetailComponent.prototype, "f", {
// convenience getter for easy access to form fields
get: function () {
return this.registerForm.controls;
},
enumerable: true,
configurable: true
});
OrganizationDetailComponent.prototype.onSubmit = function (organizationDetail) {
var _this = this;
this.submitted = true;
this.getFormValidationErrors(this.registerForm);
if (this.registerForm.invalid) {
return;
}
else {
this.loading = true;
organizationDetail.AffiliateId = this.affiliateId;
this.organizationService.create(organizationDetail)
.subscribe(function (data) {
_this.router.navigate(['./manage-organizations']);
if (_this.userId > 0)
_this.alertService.success(_this.userId > 0 ? 'Client updated successfully' : 'Client registered successfully', true);
_this.loading = false;
}, function (error) {
_this.alertService.error(error);
_this.loading = false;
});
}
};
OrganizationDetailComponent.prototype.getFormValidationErrors = function (fg) {
Object.keys(fg.controls).forEach(function (key) {
var controlErrors = fg.get(key).errors;
if (controlErrors != null) {
Object.keys(controlErrors).forEach(function (keyError) {
console.log('Key control: ' + key + ', keyError: ' + keyError + ', err value: ', controlErrors[keyError]);
// fg.controls[key].setErrors({'incorrect': true});
});
}
});
};
OrganizationDetailComponent.prototype.getUserDetail = function (userId) {
var _this = this;
this.organizationService.getOrganizationDetailByUserId(userId, true)
.subscribe(function (userData) {
_this.contactId = userData.ContactId;
_this.registerForm.patchValue({
UserId: userData.UserId,
OrganizationName: userData.OrganizationName,
FirstName: userData.FirstName,
LastName: userData.LastName,
UserName: userData.UserName,
UserRoleId: userData.UserRoleId,
Phone: userData.Phone,
Phone2: userData.Phone2,
});
});
};
OrganizationDetailComponent.prototype.getRoleList = function () {
var _this = this;
        this.httpTagsService.getTagsByType('Customer Role', true)
            .subscribe(function (roles) {
            _this.rolesModel = roles;
        });
    };
    // private getUserGroupList() {
// this.httpTagsService.getTagsByType('User Group', true)
    //     .subscribe(userGroup => {
    //         this.userGroupsModel = userGroup;
// });
//
// }
OrganizationDetailComponent.prototype.isEmailUnique = function (control) {
var _this = this;
var q = new Promise(function (resolve) {
if (_this.userId == 0) {
setTimeout(function () {
_this.organizationService.isEmailRegisterd(control.value).subscribe(function (data) {
if (data == null) {
resolve(null);
}
else {
resolve({ 'isEmailUnique': true });
}
}, function () {
resolve({ 'isEmailUnique': false });
});
}, 1000);
}
});
return q;
};
OrganizationDetailComponent.prototype.pwdMatchValidator = function (control) {
var _this = this;
var q = new Promise(function (resolve) {
_this.registerForm.get('Password').value === control.value
? resolve(null) : resolve({ 'isConfirmPasswordMatch': true });
});
return q;
};
OrganizationDetailComponent.prototype.goToAddresses = function () {
localStorage.setItem('contactId', this.contactId.toString());
this.router.navigate(['manage-addresses'], { queryParams: { id: this.contactId } });
};
OrganizationDetailComponent.prototype.goToGroups = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-groups']);
};
OrganizationDetailComponent.prototype.goToCategories = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-categories']);
};
OrganizationDetailComponent.prototype.goToSubCategories = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-subcategories']);
};
OrganizationDetailComponent.prototype.goToUnits = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-units']);
};
OrganizationDetailComponent.prototype.goToItems = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-items']);
};
OrganizationDetailComponent.prototype.goToClients = function () {
localStorage.getItem('organizationId');
this.router.navigate(['manage-clients']);
};
OrganizationDetailComponent.prototype.goToManageOrganization = function () {
localStorage.removeItem('userId');
localStorage.removeItem('organizationId');
localStorage.removeItem('organizationName');
this.router.navigate(['manage-organizations']);
};
OrganizationDetailComponent = __decorate([
Object(_angular_core__WEBPACK_IMPORTED_MODULE_0__["Component"])({
selector: 'user-detail',
template: __webpack_require__(/*! ./organization-detail.component.html */ "./src/app/layout/organization-detail/organization-detail.component.html"),
styles: [__webpack_require__(/*! ./organization-detail.component.scss */ "./src/app/layout/organization-detail/organization-detail.component.scss")]
}),
__metadata("design:paramtypes", [_angular_forms__WEBPACK_IMPORTED_MODULE_2__["FormBuilder"],
_angular_router__WEBPACK_IMPORTED_MODULE_1__["Router"],
_services_alert_service__WEBPACK_IMPORTED_MODULE_3__["AlertService"],
_services_tag_service__WEBPACK_IMPORTED_MODULE_4__["TagsService"],
_services_organizations_service__WEBPACK_IMPORTED_MODULE_5__["OrganizationsService"],
_angular_router__WEBPACK_IMPORTED_MODULE_1__["ActivatedRoute"]])
], OrganizationDetailComponent);
return OrganizationDetailComponent;
}());
/***/ }),
/***/ "./src/app/layout/organization-detail/organization-detail.module.ts":
/*!**************************************************************************!*\
!*** ./src/app/layout/organization-detail/organization-detail.module.ts ***!
\**************************************************************************/
/*! exports provided: OrganizationDetailModule */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "OrganizationDetailModule", function() { return OrganizationDetailModule; });
/* harmony import */ var _angular_core__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @angular/core */ "./node_modules/@angular/core/fesm5/core.js");
/* harmony import */ var _angular_common__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @angular/common */ "./node_modules/@angular/common/fesm5/common.js");
/* harmony import */ var _ng_bootstrap_ng_bootstrap__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @ng-bootstrap/ng-bootstrap */ "./node_modules/@ng-bootstrap/ng-bootstrap/index.js");
/* harmony import */ var _angular_forms__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @angular/forms */ "./node_modules/@angular/forms/fesm5/forms.js");
/* harmony import */ var angular_datatables__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! angular-datatables */ "./node_modules/angular-datatables/index.js");
/* harmony import */ var _shared__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../../shared */ "./src/app/shared/index.ts");
/* harmony import */ var _organization_detail_component__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./organization-detail.component */ "./src/app/layout/organization-detail/organization-detail.component.ts");
/* harmony import */ var _organization_detail_routing_module__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./organization-detail-routing.module */ "./src/app/layout/organization-detail/organization-detail-routing.module.ts");
var __decorate = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var OrganizationDetailModule = /** @class */ (function () {
function OrganizationDetailModule() {
}
OrganizationDetailModule = __decorate([
Object(_angular_core__WEBPACK_IMPORTED_MODULE_0__["NgModule"])({
imports: [
_angular_common__WEBPACK_IMPORTED_MODULE_1__["CommonModule"],
_ng_bootstrap_ng_bootstrap__WEBPACK_IMPORTED_MODULE_2__["NgbCarouselModule"].forRoot(),
_ng_bootstrap_ng_bootstrap__WEBPACK_IMPORTED_MODULE_2__["NgbAlertModule"].forRoot(),
_organization_detail_routing_module__WEBPACK_IMPORTED_MODULE_7__["OrganizationDetailRoutingModule"],
_shared__WEBPACK_IMPORTED_MODULE_5__["StatModule"],
angular_datatables__WEBPACK_IMPORTED_MODULE_4__["DataTablesModule"],
_angular_forms__WEBPACK_IMPORTED_MODULE_3__["ReactiveFormsModule"]
],
declarations: [_organization_detail_component__WEBPACK_IMPORTED_MODULE_6__["OrganizationDetailComponent"]
]
})
], OrganizationDetailModule);
return OrganizationDetailModule;
}());
/***/ })
}]);
//# sourceMappingURL=organization-detail-organization-detail-module.js.map