max_stars_repo_path (string, 4–245 chars) | max_stars_repo_name (string, 7–115 chars) | max_stars_count (int64, 101–368k) | id (string, 2–8 chars) | content (string, 6–1.03M chars)
---|---|---|---|---|
modules/utorrent.py | scambra/HTPC-Manager | 422 | 136713 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'quentingerome'
import requests
from cherrypy.lib.auth2 import require, member_of
import logging
import htpc
import cherrypy
from HTMLParser import HTMLParser
from htpc.helpers import striphttp
logger = logging.getLogger('modules.utorrent')
class AuthTokenParser(HTMLParser):
    _token = None
def handle_data(self, data):
self._token = data
def token(self, html):
self._token = None
self.feed(html)
return self._token
fields = {
'name': 2,
'id': 0,
'status': 1,
'size': 3,
'percentage_done': 4,
'dl': 5,
'up': 6,
'dl_speed': 9,
'up_speed': 8,
'eta': 10,
'ratio': 7,
}
status = {
1: 'Started',
2: 'Checking',
4: 'Started&Checked',
8: 'Checked',
16: 'Error',
32: 'Paused',
64: 'Queued',
128: 'Loaded'
}
def _get_torrent_state(state_sum):
"""
Returns a list of all states of the torrent
    :param state_sum: sum of the status flag values as reported by uTorrent
    :return: list of the individual status flag values
"""
states = []
for ps in sorted(status.keys(), reverse=True):
if not state_sum:
break
if ps <= state_sum:
states.append(ps)
state_sum -= ps
return states
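# Illustrative note (not part of the original module): uTorrent reports the
# torrent state as a bitmask, so a state sum of 201 (128 + 64 + 8 + 1)
# decomposes to [128, 64, 8, 1], i.e. Loaded, Queued, Checked and Started.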
def TorrentResult(values):
"""
    :param values: raw torrent row as returned by the uTorrent webui list call
    :type values: list
    :return: mapping of the field names defined in ``fields`` to their values
    :rtype: dict
"""
def get_result(vals):
for key, idx in fields.items():
if key != 'status':
yield key, vals[idx]
else:
yield key, _get_torrent_state(vals[idx])
return dict([(k, v) for k, v in get_result(values)])
class ConnectionError(Exception):
pass
class UTorrent(object):
_token = ''
_cookies = None
def __init__(self):
self.sess = requests.Session()
htpc.MODULES.append({
'name': 'uTorrent',
'id': 'utorrent',
'test': htpc.WEBDIR + 'utorrent/ping',
'fields': [
{'type': 'bool', 'label': 'Enable', 'name': 'utorrent_enable'},
{'type': 'text', 'label': 'Menu name', 'name': 'utorrent_name'},
{'type': 'text', 'label': 'IP / Host *', 'name': 'utorrent_host'},
{'type': 'text', 'label': 'Port', 'placeholder': '8080', 'name': 'utorrent_port'},
{'type': 'text', 'label': 'Username', 'name': 'utorrent_username'},
{'type': 'password', 'label': 'Password', 'name': 'utorrent_password'},
{'type': 'text', 'label': 'Reverse proxy link', 'placeholder': '', 'desc': 'Reverse proxy link, e.g. https://utorrent.domain.com', 'name': 'utorrent_reverse_proxy_link'},
]
})
@cherrypy.expose()
@require()
def index(self):
return htpc.LOOKUP.get_template('utorrent.html').render(scriptname='utorrent', webinterface=self.webinterface())
def webinterface(self):
ip = htpc.settings.get('utorrent_host')
port = htpc.settings.get('utorrent_port')
url = 'http://%s:%s/gui/' % (ip, port)
if htpc.settings.get('utorrent_reverse_proxy_link'):
url = htpc.settings.get('utorrent_reverse_proxy_link')
return url
@cherrypy.tools.json_out()
@cherrypy.expose()
@require()
def torrents(self):
req = self.fetch('&list=1')
if req:
torrents = req.json()['torrents']
return {'torrents': [TorrentResult(tor) for tor in torrents], 'result': req.status_code}
else:
return {'result': 500}
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def start(self, torrent_id):
return self.do_action('start', hash=torrent_id).json()
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def stop(self, torrent_id):
return self.do_action('stop', hash=torrent_id).json()
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def remove(self, torrent_id):
return self.do_action('remove', hash=torrent_id).json()
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def remove_data(self, torrent_id):
return self.do_action('removedata', hash=torrent_id).json()
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def add_url(self, url):
try:
res = self.do_action('add-url', s=url)
return {'result': res.status_code}
        except ConnectionError as e:
logger.exception(e)
@cherrypy.tools.json_out()
@require()
@cherrypy.expose()
def get_speed_limit(self):
r = self.do_action('getsettings')
d = {}
if r:
for k in r.json()['settings']:
if 'max_dl_rate' in k:
d['dl'] = k[2]
elif 'max_ul_rate' in k:
d['ul'] = k[2]
return d
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def to_client(self, link, torrentname, **kwargs):
try:
logger.info('Added %s to uTorrent' % torrentname)
res = self.do_action('add-url', s=link)
return {'result': res.status_code}
except Exception as e:
            logger.error('Failed to send %s to uTorrent %s %s' % (link, torrentname, e))
def change_label(self, hash, label):
return self.do_action('setprops', hash=hash, s='label', v=label)
@cherrypy.expose()
@require(member_of(htpc.role_admin))
@cherrypy.tools.json_out()
def ping(self, utorrent_host='', utorrent_port='',
utorrent_username='', utorrent_password='', **kwargs):
logger.debug("Testing uTorrent connectivity")
        res = self.fetch('&list=1', host=utorrent_host, port=utorrent_port, username=utorrent_username, password=utorrent_password)
        if res and res.status_code == 200:
return True
else:
logger.error("Unable to contact uTorrent via " + self._get_url(utorrent_host, utorrent_port))
return
def do_action(self, action, hash=None, s=None, **kwargs):
"""
        :param action: name of the uTorrent webui action to perform
        :param hash: hash of the torrent the action applies to, if any
        :param s: URL to add (only used by the 'add-url' action)
        :param kwargs: extra query-string parameters appended to the request
        :rtype: requests.Response
        :return: response from the uTorrent webui
"""
if action not in ('start', 'stop', 'pause', 'forcestart', 'unpause', 'remove', 'removedata', 'add-url', 'recheck', 'setprio',
'queuebottom', 'queuetop', 'queuedown', 'queueup', 'getfiles', 'getsettings', 'setsetting'):
raise AttributeError
if action == 'add-url':
return self.fetch('&action=%s&s=%s' % (action, s))
params_str = ''.join(["&%s=%s" % (k, v) for k, v in kwargs.items()])
if hash is None:
# getsettings
return self.fetch('&action=%s%s' % (action, params_str))
return self.fetch('&action=%s%s&hash=%s' % (action, params_str, hash))
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def change_speed(self, **kw):
        if 'max_ul_rate' in kw or 'max_dl_rate' in kw:
self.do_action('setsetting', kw)
else:
logger.error('Wrong parameters given')
@cherrypy.expose()
@require(member_of(htpc.role_user))
def set_upspeed(self, speed, *arg, **kw):
return self.fetch('&action=setsetting&s=max_ul_rate&v=' + speed)
@cherrypy.expose()
@require(member_of(htpc.role_user))
def set_downspeed(self, speed, *arg, **kw):
return self.fetch('&action=setsetting&s=max_dl_rate&v=' + speed)
def _get_url(self, host=None, port=None):
u_host = host or htpc.settings.get('utorrent_host')
u_port = port or htpc.settings.get('utorrent_port')
return 'http://{}:{}/gui/'.format(striphttp(u_host), u_port)
def auth(self, host, port, username, pwd):
logger.debug('Fetching auth token')
token_page = self.sess.get(self._get_url(host, port) + 'token.html', auth=(username, pwd))
self._token = AuthTokenParser().token(token_page.content)
self._cookies = token_page.cookies
logger.debug('Auth token is %s' % self._token)
return self._token
def fetch(self, args, username='', password='', host='', port=''):
"""
        :param args: query-string fragment appended to the base gui URL
        :rtype: requests.Response
        :return: response from the uTorrent webui, or None on failure
"""
password = password or htpc.settings.get('utorrent_password', '')
username = username or htpc.settings.get('utorrent_username', '')
host = host or htpc.settings.get('utorrent_host')
port = port or htpc.settings.get('utorrent_port')
token_str = '?token=%s' % self._token
url = self._get_url(host, port) + token_str + args
logger.debug('Fetching %s' % url)
try:
r = self.sess.get(url, timeout=5, auth=(username, password))
            # The API docs say an invalid token returns 300, but in practice it returns 400.
            # uTorrent 3.4.5 returns 401 when you try to fetch the token.
if r.status_code in [401, 400, 300]:
token = self.auth(host, port, username, password)
if token:
return self.fetch(args)
elif r.status_code == 404:
logger.error('Check your settings, invalid username or password')
elif r.status_code == 200:
if r:
return r
except Exception as e:
logger.error('Failed to fetch %s with args %s %s' % (url, args, e), exc_info=True)
|
SimG4Core/PrintGeomInfo/python/g4PrintGeomSummary_cfi.py | ckamtsikis/cmssw | 852 | 136752 |
import FWCore.ParameterSet.Config as cms
def printGeomSummary(process):
process.load("SimGeneral.HepPDTESSource.pdt_cfi")
process.load("Geometry.TrackerNumberingBuilder.trackerNumberingGeometry_cfi")
process.load("Geometry.MuonNumbering.muonNumberingInitialization_cfi")
process.load("Geometry.EcalCommonData.ecalSimulationParameters_cff")
process.load("Geometry.HcalCommonData.hcalDDDSimConstants_cff")
process.load("IOMC.RandomEngine.IOMC_cff")
process.load('IOMC.EventVertexGenerators.VtxSmearedFlat_cfi')
process.load('GeneratorInterface.Core.generatorSmeared_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load("SimG4Core.Application.g4SimHits_cfi")
process.source = cms.Source("EmptySource")
process.generator = cms.EDProducer("FlatRandomEGunProducer",
PGunParameters = cms.PSet(
PartID = cms.vint32(14),
MinEta = cms.double(-3.5),
MaxEta = cms.double(3.5),
MinPhi = cms.double(-3.14159265359),
MaxPhi = cms.double(3.14159265359),
MinE = cms.double(9.99),
MaxE = cms.double(10.01)
),
AddAntiParticle = cms.bool(False),
Verbosity = cms.untracked.int32(0),
firstRun = cms.untracked.uint32(1)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.p1 = cms.Path(process.generator*process.VtxSmeared*process.generatorSmeared*process.g4SimHits)
process.g4SimHits.UseMagneticField = False
process.g4SimHits.Physics.type = 'SimG4Core/Physics/DummyPhysics'
process.g4SimHits.Physics.DummyEMPhysics = True
process.g4SimHits.Physics.DefaultCutValue = 10.
process.g4SimHits.Watchers = cms.VPSet(cms.PSet(
NodeNames = cms.untracked.vstring(),
type = cms.string('PrintGeomSummary')
))
    return process
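# Illustrative usage sketch (not part of the original cfi file): in a cmsRun
# configuration this helper would typically be applied to a freshly created
# process, e.g.
#
#   import FWCore.ParameterSet.Config as cms
#   from SimG4Core.PrintGeomInfo.g4PrintGeomSummary_cfi import printGeomSummary
#
#   process = cms.Process("PrintGeomSummary")
#   process = printGeomSummary(process)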
|
mirage/libs/common/sdr/hackrf_definitions.py | tlechien/mirage | 123 | 136767 |
from ctypes import *
import subprocess
from os.path import isfile
'''
This file contains the definitions used by HackRF one, e.g. library path or constants.
'''
HACKRFLIB_AVAILABLE = False
# Autofind the path of libhackrf.so
possiblePaths = [
"/usr/local/lib/libhackrf.so",
"/usr/lib/x86_64-linux-gnu/libhackrf.so",
"/usr/lib64/libhackrf.so"
]
pathToLib = None
for path in possiblePaths:
if isfile(path):
pathToLib = path
break
# The following line could be used to autofind the path, but it's probably too invasive and slow at startup
#pathToLib = subprocess.check_output("find / -name libhackrf.so 2> /dev/null | head -n1" , shell=True).decode('ascii').replace("\n","")
if pathToLib is not None:
libhackrf = CDLL(pathToLib)
HACKRFLIB_AVAILABLE = True
# Enum definitions
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
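# Illustrative example (not part of the original module): enum() simply builds
# a namespace class from its arguments, e.g.
#
#   Example = enum('OFF', 'ON', ERROR=-1)
#   assert Example.OFF == 0 and Example.ON == 1 and Example.ERROR == -1
#
# which is exactly how the constant groups below are declared.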
HackRfVendorRequest = enum(
HACKRF_VENDOR_REQUEST_SET_TRANSCEIVER_MODE = 1,
HACKRF_VENDOR_REQUEST_MAX2837_WRITE = 2,
HACKRF_VENDOR_REQUEST_MAX2837_READ = 3,
HACKRF_VENDOR_REQUEST_SI5351C_WRITE = 4,
HACKRF_VENDOR_REQUEST_SI5351C_READ = 5,
HACKRF_VENDOR_REQUEST_SAMPLE_RATE_SET = 6,
HACKRF_VENDOR_REQUEST_BASEBAND_FILTER_BANDWIDTH_SET = 7,
HACKRF_VENDOR_REQUEST_RFFC5071_WRITE = 8,
HACKRF_VENDOR_REQUEST_RFFC5071_READ = 9,
HACKRF_VENDOR_REQUEST_SPIFLASH_ERASE = 10,
HACKRF_VENDOR_REQUEST_SPIFLASH_WRITE = 11,
HACKRF_VENDOR_REQUEST_SPIFLASH_READ = 12,
HACKRF_VENDOR_REQUEST_CPLD_WRITE = 13,
HACKRF_VENDOR_REQUEST_BOARD_ID_READ = 14,
HACKRF_VENDOR_REQUEST_VERSION_STRING_READ = 15,
HACKRF_VENDOR_REQUEST_SET_FREQ = 16,
HACKRF_VENDOR_REQUEST_AMP_ENABLE = 17,
HACKRF_VENDOR_REQUEST_BOARD_PARTID_SERIALNO_READ = 18,
HACKRF_VENDOR_REQUEST_SET_LNA_GAIN = 19,
HACKRF_VENDOR_REQUEST_SET_VGA_GAIN = 20,
HACKRF_VENDOR_REQUEST_SET_TXVGA_GAIN = 21
)
HackRfConstants = enum(
LIBUSB_ENDPOINT_IN = 0x80,
LIBUSB_ENDPOINT_OUT = 0x00,
HACKRF_DEVICE_OUT = 0x40,
HACKRF_DEVICE_IN = 0xC0,
HACKRF_USB_VID = 0x1d50,
HACKRF_USB_PID = 0x6089
)
HackRfError = enum(
HACKRF_SUCCESS = 0,
HACKRF_TRUE = 1,
HACKRF_ERROR_INVALID_PARAM = -2,
HACKRF_ERROR_NOT_FOUND = -5,
HACKRF_ERROR_BUSY = -6,
HACKRF_ERROR_NO_MEM = -11,
HACKRF_ERROR_LIBUSB = -1000,
HACKRF_ERROR_THREAD = -1001,
HACKRF_ERROR_STREAMING_THREAD_ERR = -1002,
HACKRF_ERROR_STREAMING_STOPPED = -1003,
HACKRF_ERROR_STREAMING_EXIT_CALLED = -1004,
HACKRF_ERROR_USB_API_VERSION = -1005,
HACKRF_ERROR_NOT_LAST_DEVICE = -2000,
HACKRF_ERROR_OTHER = -9999,
# Python defaults to returning none
HACKRF_ERROR = None
)
HackRfTranscieverMode = enum(
HACKRF_TRANSCEIVER_MODE_OFF = 0,
HACKRF_TRANSCEIVER_MODE_RECEIVE = 1,
HACKRF_TRANSCEIVER_MODE_TRANSMIT = 2
)
# Data structures
_libusb_device_handle = c_void_p
_pthread_t = c_ulong
class hackrf_device(Structure):
pass
class hackrf_device_list(Structure):
_fields_ = [("serial_number", POINTER(POINTER(c_char))),
("usb_board_ids",c_int),
("usb_device_index",POINTER(c_int)),
("devicecount",c_int),
("usb_devices",POINTER(c_void_p)),
("usb_device_count",c_int)]
class hackrf_transfer(Structure):
_fields_ = [("hackrf_device", POINTER(hackrf_device)),
("buffer", POINTER(c_byte)),
("buffer_length", c_int),
("valid_length", c_int),
("rx_ctx", c_void_p),
("tx_ctx", c_void_p) ]
hackrflibcallback = CFUNCTYPE(c_int, POINTER(hackrf_transfer))
hackrf_device._fields_ = [("usb_device", POINTER(_libusb_device_handle)),
("transfers", POINTER(POINTER(hackrf_transfer))),
("callback", hackrflibcallback),
("transfer_thread_started", c_int),
("transfer_thread", _pthread_t),
("transfer_count", c_uint32),
("buffer_size", c_uint32),
("streaming", c_int),
("rx_ctx", c_void_p),
("tx_ctx", c_void_p) ]
# extern ADDAPI hackrf_device_list_t* ADDCALL hackrf_device_list();
libhackrf.hackrf_device_list.restype = POINTER(hackrf_device_list)
libhackrf.hackrf_device_list.argtypes = []
# extern ADDAPI int ADDCALL hackrf_init();
libhackrf.hackrf_init.restype = c_int
libhackrf.hackrf_init.argtypes = []
# extern ADDAPI int ADDCALL hackrf_exit();
libhackrf.hackrf_exit.restype = c_int
libhackrf.hackrf_exit.argtypes = []
# extern ADDAPI int ADDCALL hackrf_open(hackrf_device** device);
libhackrf.hackrf_open.restype = c_int
libhackrf.hackrf_open.argtypes = [POINTER(POINTER(hackrf_device))]
# extern ADDAPI int ADDCALL hackrf_open_by_serial(const char* const desired_serial_number, hackrf_device** device);
libhackrf.hackrf_open_by_serial.restype = c_int
libhackrf.hackrf_open_by_serial.argtypes = [POINTER(c_char),POINTER(POINTER(hackrf_device))]
# extern ADDAPI int ADDCALL hackrf_close(hackrf_device* device);
libhackrf.hackrf_close.restype = c_int
libhackrf.hackrf_close.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_start_rx(hackrf_device* device,
# hackrf_sample_block_cb_fn callback, void* rx_ctx);
libhackrf.hackrf_start_rx.restype = c_int
libhackrf.hackrf_start_rx.argtypes = [POINTER(hackrf_device), hackrflibcallback, c_void_p]
# extern ADDAPI int ADDCALL hackrf_stop_rx(hackrf_device* device);
libhackrf.hackrf_stop_rx.restype = c_int
libhackrf.hackrf_stop_rx.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_start_tx(hackrf_device* device,
# hackrf_sample_block_cb_fn callback, void* tx_ctx);
libhackrf.hackrf_start_tx.restype = c_int
libhackrf.hackrf_start_tx.argtypes = [POINTER(hackrf_device), hackrflibcallback, c_void_p]
# extern ADDAPI int ADDCALL hackrf_stop_tx(hackrf_device* device);
libhackrf.hackrf_stop_tx.restype = c_int
libhackrf.hackrf_stop_tx.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_is_streaming(hackrf_device* device);
libhackrf.hackrf_is_streaming.restype = c_int
libhackrf.hackrf_is_streaming.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_max2837_read(hackrf_device* device,
# uint8_t register_number, uint16_t* value);
libhackrf.hackrf_max2837_read.restype = c_int
libhackrf.hackrf_max2837_read.argtypes = [
POINTER(hackrf_device), c_uint8, POINTER(c_uint16)]
# extern ADDAPI int ADDCALL hackrf_max2837_write(hackrf_device* device,
# uint8_t register_number, uint16_t value);
libhackrf.hackrf_max2837_write.restype = c_int
libhackrf.hackrf_max2837_write.argtypes = [POINTER(hackrf_device), c_uint8, c_uint16]
# extern ADDAPI int ADDCALL hackrf_si5351c_read(hackrf_device* device,
# uint16_t register_number, uint16_t* value);
libhackrf.hackrf_si5351c_read.restype = c_int
libhackrf.hackrf_si5351c_read.argtypes = [
POINTER(hackrf_device), c_uint16, POINTER(c_uint16)]
# extern ADDAPI int ADDCALL hackrf_si5351c_write(hackrf_device* device,
# uint16_t register_number, uint16_t value);
libhackrf.hackrf_si5351c_write.restype = c_int
libhackrf.hackrf_si5351c_write.argtypes = [POINTER(hackrf_device), c_uint16, c_uint16]
# extern ADDAPI int ADDCALL
# hackrf_set_baseband_filter_bandwidth(hackrf_device* device, const
# uint32_t bandwidth_hz);
libhackrf.hackrf_set_baseband_filter_bandwidth.restype = c_int
libhackrf.hackrf_set_baseband_filter_bandwidth.argtypes = [
POINTER(hackrf_device), c_uint32]
# extern ADDAPI int ADDCALL hackrf_rffc5071_read(hackrf_device* device,
# uint8_t register_number, uint16_t* value);
libhackrf.hackrf_rffc5071_read.restype = c_int
libhackrf.hackrf_rffc5071_read.argtypes = [
POINTER(hackrf_device), c_uint8, POINTER(c_uint16)]
# extern ADDAPI int ADDCALL hackrf_rffc5071_write(hackrf_device*
# device, uint8_t register_number, uint16_t value);
libhackrf.hackrf_rffc5071_write.restype = c_int
libhackrf.hackrf_rffc5071_write.argtypes = [POINTER(hackrf_device), c_uint8, c_uint16]
# extern ADDAPI int ADDCALL hackrf_spiflash_erase(hackrf_device*
# device);
libhackrf.hackrf_spiflash_erase.restype = c_int
libhackrf.hackrf_spiflash_erase.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_spiflash_write(hackrf_device*
# device, const uint32_t address, const uint16_t length, unsigned char*
# const data);
libhackrf.hackrf_spiflash_write.restype = c_int
libhackrf.hackrf_spiflash_write.argtypes = [
POINTER(hackrf_device), c_uint32, c_uint16, POINTER(c_ubyte)]
# extern ADDAPI int ADDCALL hackrf_spiflash_read(hackrf_device* device,
# const uint32_t address, const uint16_t length, unsigned char* data);
libhackrf.hackrf_spiflash_read.restype = c_int
libhackrf.hackrf_spiflash_read.argtypes = [
POINTER(hackrf_device), c_uint32, c_uint16, POINTER(c_ubyte)]
# extern ADDAPI int ADDCALL hackrf_cpld_write(hackrf_device* device,
# unsigned char* const data, const unsigned int total_length);
libhackrf.hackrf_cpld_write.restype = c_int
libhackrf.hackrf_cpld_write.argtypes = [POINTER(hackrf_device), POINTER(c_ubyte), c_uint]
# extern ADDAPI int ADDCALL hackrf_board_id_read(hackrf_device* device,
# uint8_t* value);
libhackrf.hackrf_board_id_read.restype = c_int
libhackrf.hackrf_board_id_read.argtypes = [POINTER(hackrf_device), POINTER(c_uint8)]
# extern ADDAPI int ADDCALL hackrf_version_string_read(hackrf_device*
# device, char* version, uint8_t length);
libhackrf.hackrf_version_string_read.restype = c_int
libhackrf.hackrf_version_string_read.argtypes = [POINTER(hackrf_device), POINTER(c_char), c_uint8]
# extern ADDAPI int ADDCALL hackrf_set_freq(hackrf_device* device,
# const uint64_t freq_hz);
libhackrf.hackrf_set_freq.restype = c_int
libhackrf.hackrf_set_freq.argtypes = [POINTER(hackrf_device), c_uint64]
# extern ADDAPI int ADDCALL hackrf_set_freq_explicit(hackrf_device* device,
# const uint64_t if_freq_hz, const uint64_t lo_freq_hz,
# const enum rf_path_filter path);,
# libhackrf.hackrf_set_freq_explicit.restype = c_int
# libhackrf.hackrf_set_freq_explicit.argtypes = [c_uint64,
# c_uint64, ]
# extern ADDAPI int ADDCALL
# hackrf_set_sample_rate_manual(hackrf_device* device, const uint32_t
# freq_hz, const uint32_t divider);
libhackrf.hackrf_set_sample_rate_manual.restype = c_int
libhackrf.hackrf_set_sample_rate_manual.argtypes = [
POINTER(hackrf_device), c_uint32, c_uint32]
# extern ADDAPI int ADDCALL hackrf_set_sample_rate(hackrf_device*
# device, const double freq_hz);
libhackrf.hackrf_set_sample_rate.restype = c_int
libhackrf.hackrf_set_sample_rate.argtypes = [POINTER(hackrf_device), c_double]
# extern ADDAPI int ADDCALL hackrf_set_amp_enable(hackrf_device*
# device, const uint8_t value);
libhackrf.hackrf_set_amp_enable.restype = c_int
libhackrf.hackrf_set_amp_enable.argtypes = [POINTER(hackrf_device), c_uint8]
# extern ADDAPI int ADDCALL
# hackrf_board_partid_serialno_read(hackrf_device* device,
# read_partid_serialno_t* read_partid_serialno);
libhackrf.hackrf_board_partid_serialno_read.restype = c_int
libhackrf.hackrf_board_partid_serialno_read.argtypes = [POINTER(hackrf_device)]
# extern ADDAPI int ADDCALL hackrf_set_lna_gain(hackrf_device* device,
# uint32_t value);
libhackrf.hackrf_set_lna_gain.restype = c_int
libhackrf.hackrf_set_lna_gain.argtypes = [POINTER(hackrf_device), c_uint32]
# extern ADDAPI int ADDCALL hackrf_set_vga_gain(hackrf_device* device,
# uint32_t value);
libhackrf.hackrf_set_vga_gain.restype = c_int
libhackrf.hackrf_set_vga_gain.argtypes = [POINTER(hackrf_device), c_uint32]
# extern ADDAPI int ADDCALL hackrf_set_txvga_gain(hackrf_device*
# device, uint32_t value);
libhackrf.hackrf_set_txvga_gain.restype = c_int
libhackrf.hackrf_set_txvga_gain.argtypes = [POINTER(hackrf_device), c_uint32]
# extern ADDAPI int ADDCALL hackrf_set_antenna_enable(hackrf_device*
# device, const uint8_t value);
libhackrf.hackrf_set_antenna_enable.restype = c_int
libhackrf.hackrf_set_antenna_enable.argtypes = [POINTER(hackrf_device), c_uint8]
# extern ADDAPI const char* ADDCALL hackrf_error_name(enum hackrf_error errcode);
# libhackrf.hackrf_error_name.restype = POINTER(c_char)
# libhackrf.hackrf_error_name.argtypes = []
# extern ADDAPI const char* ADDCALL hackrf_board_id_name(enum hackrf_board_id board_id);
libhackrf.hackrf_board_id_name.restype = c_char_p
libhackrf.hackrf_board_id_name.argtypes = [c_uint8]
# extern ADDAPI const char* ADDCALL hackrf_filter_path_name(const enum rf_path_filter path);
# libhackrf.hackrf_filter_path_name.restype = POINTER(c_char)
# libhackrf.hackrf_filter_path_name.argtypes = []
# extern ADDAPI uint32_t ADDCALL
# hackrf_compute_baseband_filter_bw_round_down_lt(const uint32_t
# bandwidth_hz);
libhackrf.hackrf_compute_baseband_filter_bw_round_down_lt.restype = c_uint32
libhackrf.hackrf_compute_baseband_filter_bw_round_down_lt.argtypes = [c_uint32]
# extern ADDAPI int ADDCALL hackrf_usb_api_version_read(hackrf_device* device, uint16_t* version);
libhackrf.hackrf_usb_api_version_read.restype = c_int
libhackrf.hackrf_usb_api_version_read.argtypes = [POINTER(hackrf_device),POINTER(c_uint16)]
# extern ADDAPI uint32_t ADDCALL
# hackrf_compute_baseband_filter_bw(const uint32_t bandwidth_hz);
libhackrf.hackrf_compute_baseband_filter_bw.restype = c_uint32
libhackrf.hackrf_compute_baseband_filter_bw.argtypes = [c_uint32]
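# Illustrative sketch (not part of the original module): a minimal way these
# ctypes bindings could be exercised to open the first HackRF, tune it and
# close it again. The chosen frequency and sample rate are arbitrary examples.
if __name__ == "__main__" and HACKRFLIB_AVAILABLE:
    device = POINTER(hackrf_device)()
    if (libhackrf.hackrf_init() == HackRfError.HACKRF_SUCCESS and
            libhackrf.hackrf_open(byref(device)) == HackRfError.HACKRF_SUCCESS):
        libhackrf.hackrf_set_freq(device, c_uint64(2402000000))   # 2.402 GHz
        libhackrf.hackrf_set_sample_rate(device, c_double(10e6))  # 10 MS/s
        libhackrf.hackrf_close(device)
    libhackrf.hackrf_exit()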
|
pyfr/solvers/baseadvecdiff/elements.py | carlospereiraf/PyFR | 126 | 136770 |
# -*- coding: utf-8 -*-
from pyfr.backends.base.kernels import ComputeMetaKernel
from pyfr.polys import get_polybasis
from pyfr.solvers.baseadvec import BaseAdvectionElements
class BaseAdvectionDiffusionElements(BaseAdvectionElements):
@property
def _scratch_bufs(self):
bufs = {'scal_fpts', 'vect_fpts', 'vect_upts'}
if 'flux' in self.antialias:
bufs |= {'scal_qpts', 'vect_qpts'}
if self._soln_in_src_exprs:
bufs |= {'scal_upts_cpy'}
return bufs
def set_backend(self, backend, nscalupts, nonce, linoff):
super().set_backend(backend, nscalupts, nonce, linoff)
kernel = self._be.kernel
kprefix = 'pyfr.solvers.baseadvecdiff.kernels'
slicem = self._slice_mat
# Register our pointwise kernels
self._be.pointwise.register(f'{kprefix}.gradcoru')
self._be.pointwise.register(f'{kprefix}.gradcorulin')
# Mesh regions
regions = self._mesh_regions
self.kernels['_copy_fpts'] = lambda: kernel(
'copy', self._vect_fpts.slice(0, self.nfpts), self._scal_fpts
)
self.kernels['tgradpcoru_upts'] = lambda: kernel(
'mul', self.opmat('M4 - M6*M0'), self.scal_upts_inb,
out=self._vect_upts
)
self.kernels['tgradcoru_upts'] = lambda: kernel(
'mul', self.opmat('M6'), self._vect_fpts.slice(0, self.nfpts),
out=self._vect_upts, beta=1.0
)
# Template arguments for the physical gradient kernel
tplargs = {
'ndims': self.ndims,
'nvars': self.nvars,
'nverts': len(self.basis.linspts),
'jac_exprs': self.basis.jac_exprs
}
if 'curved' in regions:
self.kernels['gradcoru_upts_curved'] = lambda: kernel(
'gradcoru', tplargs=tplargs,
dims=[self.nupts, regions['curved']],
gradu=slicem(self._vect_upts, 'curved'),
smats=self.smat_at('upts', 'curved'),
rcpdjac=self.rcpdjac_at('upts', 'curved')
)
if 'linear' in regions:
self.kernels['gradcoru_upts_linear'] = lambda: kernel(
'gradcorulin', tplargs=tplargs,
dims=[self.nupts, regions['linear']],
gradu=slicem(self._vect_upts, 'linear'),
upts=self.upts, verts=self.ploc_at('linspts', 'linear')
)
def gradcoru_fpts():
nupts, nfpts = self.nupts, self.nfpts
vupts, vfpts = self._vect_upts, self._vect_fpts
# Exploit the block-diagonal form of the operator
muls = [kernel('mul', self.opmat('M0'),
vupts.slice(i*nupts, (i + 1)*nupts),
vfpts.slice(i*nfpts, (i + 1)*nfpts))
for i in range(self.ndims)]
return ComputeMetaKernel(muls)
self.kernels['gradcoru_fpts'] = gradcoru_fpts
if 'flux' in self.antialias:
def gradcoru_qpts():
nupts, nqpts = self.nupts, self.nqpts
vupts, vqpts = self._vect_upts, self._vect_qpts
# Exploit the block-diagonal form of the operator
muls = [self._be.kernel('mul', self.opmat('M7'),
vupts.slice(i*nupts, (i + 1)*nupts),
vqpts.slice(i*nqpts, (i + 1)*nqpts))
for i in range(self.ndims)]
return ComputeMetaKernel(muls)
self.kernels['gradcoru_qpts'] = gradcoru_qpts
# Shock capturing
shock_capturing = self.cfg.get('solver', 'shock-capturing', 'none')
if shock_capturing == 'artificial-viscosity':
tags = {'align'}
# Register the kernels
self._be.pointwise.register(
'pyfr.solvers.baseadvecdiff.kernels.shocksensor'
)
# Obtain the scalar variable to be used for shock sensing
shockvar = self.convarmap[self.ndims].index(self.shockvar)
# Obtain the name, degrees, and order of our solution basis
ubname = self.basis.ubasis.name
ubdegs = self.basis.ubasis.degrees
uborder = self.basis.ubasis.order
# Obtain the degrees of a basis whose order is one lower
lubdegs = get_polybasis(ubname, max(0, uborder - 1)).degrees
            # Flag the modes that are not present in the lower-order basis
ind_modes = [d not in lubdegs for d in ubdegs]
# Template arguments
tplargs_artvisc = dict(
nvars=self.nvars, nupts=self.nupts, svar=shockvar,
c=self.cfg.items_as('solver-artificial-viscosity', float),
order=self.basis.order, ind_modes=ind_modes,
invvdm=self.basis.ubasis.invvdm.T
)
# Allocate space for the artificial viscosity vector
self.artvisc = self._be.matrix((1, self.neles),
extent=nonce + 'artvisc', tags=tags)
# Apply the sensor to estimate the required artificial viscosity
self.kernels['shocksensor'] = lambda: self._be.kernel(
'shocksensor', tplargs=tplargs_artvisc, dims=[self.neles],
u=self.scal_upts_inb, artvisc=self.artvisc
)
elif shock_capturing == 'none':
self.artvisc = None
else:
raise ValueError('Invalid shock capturing scheme')
def get_artvisc_fpts_for_inter(self, eidx, fidx):
nfp = self.nfacefpts[fidx]
return (self.artvisc.mid,)*nfp, (0,)*nfp, (eidx,)*nfp
|
tests/test_zero/test_mem_collector.py | RichardoLuo/ColossalAI | 1,630 | 136779 |
import torch
import colossalai
import pytest
import torch.multiprocessing as mp
import torch.nn as nn
import torch.nn.functional as F
from colossalai.utils.cuda import get_current_device
from colossalai.utils.memory import colo_device_memory_capacity, colo_set_process_memory_fraction
from colossalai.zero.init_ctx import ZeroInitContext
from colossalai.zero.sharded_model import ShardedModelV2
from colossalai.zero.shard_utils import BucketTensorShardStrategy
from colossalai.utils import free_port
from colossalai.testing import rerun_if_address_is_in_use
from functools import partial
class MyTestModel(torch.nn.Module):
def __init__(self) -> None:
super().__init__()
self.proj1 = nn.Linear(512, 512)
self.weight = nn.Parameter(torch.randn(1024, 512))
self.proj2 = nn.Linear(1024, 512)
def forward(self, x):
x = self.proj1(x)
x = F.linear(x, self.weight)
x = self.proj2(x)
return x
def run_mem_collector_testing():
cuda_capacity = colo_device_memory_capacity(get_current_device())
fraction = (50 * 1024**2) / cuda_capacity
# limit max memory to 50MB
colo_set_process_memory_fraction(fraction)
shard_strategy = BucketTensorShardStrategy()
with ZeroInitContext(target_device=get_current_device(), shard_strategy=shard_strategy, shard_param=True):
model = MyTestModel()
model = ShardedModelV2(module=model,
shard_strategy=shard_strategy,
reduce_scatter_bucket_size_mb=1,
tensor_placement_policy='auto')
data = torch.randn(2, 512, device=get_current_device())
output = model(data)
loss = torch.mean(output)
model.backward(loss)
cuda_model_data_list = model._memstats_collector.model_data_list('cuda')
assert cuda_model_data_list == [1311744, 1836032, 1836032, 1311744, 1836032, 1836032]
cuda_non_model_data_list = model._memstats_collector.non_model_data_list('cuda')
assert cuda_non_model_data_list[0] > cuda_non_model_data_list[1]
assert cuda_non_model_data_list[-2] > cuda_non_model_data_list[-1]
def run_dist(rank, world_size, port):
colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
run_mem_collector_testing()
@pytest.mark.dist
@rerun_if_address_is_in_use()
def test_mem_collector(world_size=2):
run_func = partial(run_dist, world_size=world_size, port=free_port())
mp.spawn(run_func, nprocs=world_size)
if __name__ == '__main__':
test_mem_collector()
|
kafka_influxdb/encoder/collectd_json_encoder.py | gldnspud/kafka-influxdb | 224 | 136834 |
try:
import ujson as json
except ImportError:
import json
import logging
try:
# Test for mypy support (requires Python 3)
from typing import List, Text
except ImportError:
pass
class Encoder(object):
"""
An encoder for the Collectd JSON format
See https://collectd.org/wiki/index.php/JSON
Sample measurements:
[{"values":[0],"dstypes":["derive"],"dsnames":["value"],"time":1436372292.412,"interval":10.000,"host":"26f2fc918f50","plugin":"cpu","plugin_instance":"1","type":"cpu","type_instance":"interrupt"}]
[
{
"values": [1901474177],
"dstypes": ["counter"],
"dsnames": ["value"],
"time": 1280959128,
"interval": 10,
"host": "leeloo.octo.it",
"plugin": "cpu",
"plugin_instance": "0",
"type": "cpu",
"type_instance": "idle"
}
]
The following measurement format is also supported, which has more than one value for each sample.
[{"values":[0.2, 0.3],"dstypes":["derive"],"dsnames":["cpu_usage", "mem_usage"],"time":1436372292.412,"interval":10.000,"host":"26f2fc918f50","plugin":"cpu","plugin_instance":"1","type":"cpu","type_instance":"interrupt"}]
"""
def encode(self, msg):
# type: (bytes) -> List[Text]
measurements = []
for line in msg.decode().split("\n"):
try:
# Set flag for float precision to get the same
# results for Python 2 and 3.
json_object = self.parse_line(line)
except ValueError as e:
logging.debug("Error in encoder: %s", e)
continue
for entry in json_object:
try:
                    # To use only plugin and plugin_instance for the measurement name, pass ['plugin', 'plugin_instance'] here instead
measurement = Encoder.format_measurement_name(
entry, ['plugin', 'plugin_instance', 'type'])
tags = Encoder.format_tags(
entry, ['host', 'type_instance'])
value = Encoder.format_value(entry)
time = Encoder.format_time(entry)
measurements.append(Encoder.compose_data(
measurement, tags, value, time))
except Exception as e:
logging.debug("Error in input data: %s. Skipping.", e)
continue
return measurements
@staticmethod
def parse_line(line):
# return json.loads(line, {'precise_float': True})
# for influxdb version > 0.9, timestamp is an integer
return json.loads(line)
    # The following methods were added to make customising the measurement name and tags more flexible
@staticmethod
def compose_data(measurement, tags, value, time):
data = "{0!s},{1!s} {2!s} {3!s}".format(measurement, tags, value, time)
return data
@staticmethod
def format_measurement_name(entry, args):
name = []
for arg in args:
if arg in entry:
                # avoid adding an extra '_' when the entry value is empty
if entry[arg] != '':
name.append(entry[arg])
return '_'.join(name)
@staticmethod
def format_tags(entry, args):
tag = []
for arg in args:
if arg in entry:
                # avoid adding an empty value as a tag
if entry[arg] != '':
tag.append("{0!s}={1!s}".format(arg, entry[arg]))
return ','.join(tag)
@staticmethod
def format_time(entry):
return int(float(entry['time']))
@staticmethod
def format_value(entry):
values = entry['values']
if len(values) == 1:
return "value={0!s}".format(entry['values'][0])
else:
# influxdb supports writing a record with multiple field values.
# e.g: 'cpu_load_short,host=server01,region=us-west mem=0.1,cpu=0.2 1422568543702900257'
field_pairs = []
for key, value in zip(entry['dsnames'], values):
field_pairs.append("{0!s}={1!s}".format(key, value))
return ','.join(field_pairs)
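# Illustrative usage sketch (not part of the original module). Feeding one of
# the sample measurements from the class docstring through the encoder yields
# an InfluxDB line-protocol string, roughly of the form
# 'cpu_1_cpu,host=26f2fc918f50,type_instance=interrupt value=0 1436372292'.
if __name__ == '__main__':
    sample = b'[{"values":[0],"dstypes":["derive"],"dsnames":["value"],"time":1436372292.412,"interval":10.000,"host":"26f2fc918f50","plugin":"cpu","plugin_instance":"1","type":"cpu","type_instance":"interrupt"}]'
    for encoded in Encoder().encode(sample):
        print(encoded)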
|
dataset/merge.py | Pandinosaurus/roadtracer | 171 | 136880 |
import numpy
from PIL import Image
import scipy.ndimage
import sys
path = sys.argv[1]
region = sys.argv[2]
x_start = int(sys.argv[3])
y_start = int(sys.argv[4])
x_end = int(sys.argv[5])
y_end = int(sys.argv[6])
out_fname = sys.argv[7]
x_len = x_end - x_start
y_len = y_end - y_start
merged_im = numpy.zeros((x_len * 4096, y_len * 4096, 3), dtype='uint8')
for i in xrange(x_len):
for j in xrange(y_len):
fname = '{}/{}_{}_{}_sat.png'.format(path, region, x_start + i, y_start + j)
merged_im[i*4096:(i+1)*4096, j*4096:(j+1)*4096, :] = scipy.ndimage.imread(fname)[:, :, 0:3].swapaxes(0, 1)
Image.fromarray(merged_im.swapaxes(0, 1)).save(out_fname)
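# Illustrative invocation (not part of the original script); the directory,
# region name, tile range and output file below are made-up examples:
#
#   python merge.py ./imagery boston 0 0 2 2 boston_sat_merged.png
#
# This stitches the 4096x4096 tiles boston_0_0_sat.png .. boston_1_1_sat.png
# into a single 8192x8192 image.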
|
ufora/FORA/python/PurePython/LogisticRegressionTests.py | ufora/ufora | 571 | 136902 |
# Copyright 2015 Ufora Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas
import numpy
import numpy.testing
from pyfora.unique import unique
from pyfora.algorithms import BinaryLogisticRegressionFitter
class LogisticRegressionTests(object):
methods = ['majorization', 'newton-cg']
def test_unique(self):
x = [5,5,4,2,4,2,1,3,3,5,6]
def f():
return unique(x)
self.assertEqual(
self.evaluateWithExecutor(f),
list(set(x))
)
def exampleData(self):
X = pandas.DataFrame({'A': [-1,0,1], 'B': [0,1,1]})
y = pandas.DataFrame({'C': [0,1,1]})
return X, y
def test_binary_logistic_regression_coefficients(self):
for method in LogisticRegressionTests.methods:
self.binary_logistic_regression_coefficients(method)
def binary_logistic_regression_coefficients(self, method):
X, y = self.exampleData()
def f():
fit = BinaryLogisticRegressionFitter(
C=1.0/len(X),
hasIntercept=True,
method=method
).fit(X, y)
return fit.intercept, fit.coefficients
computedIntercept, computedCoefficients = self.evaluateWithExecutor(f)
expectedIntercept = -0.10102151
expectedCoefficients = numpy.array([-0.26901034, -0.25372016])
numpy.testing.assert_almost_equal(
computedIntercept,
expectedIntercept,
decimal=4
)
numpy.testing.assert_allclose(
computedCoefficients,
expectedCoefficients,
rtol=0.1
)
def test_binary_logistic_regression_probabilities(self):
for method in LogisticRegressionTests.methods:
self.binary_logistic_regression_probabilities(method)
def binary_logistic_regression_probabilities(self, method):
X, y = self.exampleData()
def f():
fit = BinaryLogisticRegressionFitter(
C=1.0/len(X),
hasIntercept=True,
method=method
).fit(X, y)
return fit.predict_probability(X)
expectedPredictedProbabilities = [0.45810128, 0.58776695, 0.6510714]
computedProbabilities = self.evaluateWithExecutor(f)
numpy.testing.assert_allclose(
computedProbabilities,
expectedPredictedProbabilities,
rtol=0.1
)
def test_binary_logistic_regression_predict(self):
for method in LogisticRegressionTests.methods:
            self.binary_logistic_regression_predict(method)
def binary_logistic_regression_predict(self, method):
X, y = self.exampleData()
def f():
fit = BinaryLogisticRegressionFitter(
C=1.0/len(X),
hasIntercept=True,
method=method
).fit(X, y)
return fit.predict(X)
numpy.testing.assert_array_equal(
self.evaluateWithExecutor(f),
numpy.array([0, 1, 1])
)
def test_binary_logistic_regression_score(self):
for method in LogisticRegressionTests.methods:
self.binary_logistic_regression_score(method)
def binary_logistic_regression_score(self, method):
X, y = self.exampleData()
def f():
fit = BinaryLogisticRegressionFitter(
C=1.0/len(X),
hasIntercept=True,
method=method
).fit(X, y)
return fit.score(X, y)
self.assertEqual(self.evaluateWithExecutor(f), 1.0)
|
dojo/tools/nessus_was/parser.py | axelpavageau/django-DefectDojo | 1,772 | 136934 |
import csv
import io
import logging
import re
import sys
from defusedxml import ElementTree
from dojo.models import Endpoint, Finding, Test
LOGGER = logging.getLogger(__name__)
class NessusWASCSVParser(object):
def _convert_severity(self, val):
if "None" == val:
return 'Info'
if val is None:
return 'Info'
else:
return val.title()
def _format_cve(self, val):
if val is None:
return None
elif "" == val:
return None
cve_match = re.findall(r"CVE-[0-9]+-[0-9]+", val.upper(), re.IGNORECASE)
if cve_match:
return cve_match
return None
def get_findings(self, filename, test: Test):
content = filename.read()
if type(content) is bytes:
content = content.decode('utf-8')
csv.field_size_limit(int(sys.maxsize / 10)) # the request/resp are big
reader = csv.DictReader(io.StringIO(content))
dupes = dict()
for row in reader:
# get title, skip entries with empty titles
title = row.get('Name')
if not title:
continue
# get description, mitigation, impact etc. from respective columns
description = row.get('Synopsis')
mitigation = str(row.get('Solution'))
impact = row.get('Description')
references = row.get('See Also')
cvssv3 = row.get('CVSSv3')
# get severity from 'Risk' column and manage columns with no 'Risk' value
severity = self._convert_severity(row.get('Risk'))
            cve = None
            if 'CVE' in row:
                detected_cve = self._format_cve(str(row.get('CVE')))
if detected_cve:
# FIXME support more than one CVE in Nessus WAS CSV parser
cve = detected_cve[0]
if len(detected_cve) > 1:
LOGGER.warning("more than one CVE for a finding. NOT supported by Nessus CSV parser")
# manage multiple columns falling under one category (e.g. description being synopsis + plugin output)
dupe_key = severity + title + row.get('Host', 'No host') + str(row.get('Port', 'No port')) + row.get('Synopsis', 'No synopsis')
if dupe_key in dupes:
find = dupes[dupe_key]
if 'Plugin Output' in row:
find.description += row.get('Plugin Output')
else:
if 'Plugin Output' in row:
description = description + str(row.get('Plugin Output'))
find = Finding(title=title,
test=test,
cve=cve,
cvssv3_score=cvssv3,
description=description,
severity=severity,
mitigation=mitigation,
impact=impact,
references=references)
find.unsaved_endpoints = list()
dupes[dupe_key] = find
# manage endpoints
endpoint = Endpoint(
protocol=row.get('Protocol').lower() if 'Protocol' in row else None,
host=row.get('Host', row.get('IP Address', 'localhost')),
port=row.get('Port')
)
find.unsaved_endpoints.append(endpoint)
return list(dupes.values())
class NessusWASXMLParser(object):
def get_findings(self, file, test):
nscan = ElementTree.parse(file)
root = nscan.getroot()
if 'NessusClientData_v2' not in root.tag:
raise ValueError('This version of Nessus report is not supported. Please make sure the export is '
'formatted using the NessusClientData_v2 schema.')
dupes = {}
for report in root.iter("Report"):
for host in report.iter("ReportHost"):
ip = host.attrib['name']
fqdn = host.find(".//HostProperties/tag[@name='host-fqdn']").text if host.find(
".//HostProperties/tag[@name='host-fqdn']") is not None else None
for item in host.iter("ReportItem"):
port = None
if float(item.attrib["port"]) > 0:
port = item.attrib["port"]
protocol = None
if str(item.attrib["svc_name"]):
protocol = re.sub(r'[^A-Za-z0-9\-\+]+', "", item.attrib["svc_name"])
if protocol == 'www':
protocol = 'http'
description = ""
plugin_output = None
if item.findtext("synopsis"):
description = item.find("synopsis").text + "\n\n"
if item.findtext("plugin_output"):
plugin_output = "Plugin Output: " + ip + (
(":" + port) if port is not None else "") + \
" \n```\n" + item.find("plugin_output").text + \
"\n```\n\n"
description += plugin_output
nessus_severity_id = int(item.attrib["severity"])
severity = self.get_text_severity(nessus_severity_id)
impact = ""
if item.findtext("description"):
impact = item.find("description").text + "\n\n"
if item.findtext("cvss"):
impact += "CVSS Score: " + item.find("cvss").text + "\n"
if item.findtext("cvssv3"):
impact += "CVSSv3 Score: " + item.find("cvssv3").text + "\n"
mitigation = item.find("solution").text if item.find("solution") is not None else "N/A"
references = ""
for ref in item.iter("see_also"):
refs = ref.text.split()
for r in refs:
references += r + "\n"
for xref in item.iter("xref"):
references += xref.text + "\n"
cve = None
if item.findtext("cve"):
cve = item.find("cve").text
cwe = None
if item.findtext("cwe"):
cwe = item.find("cwe").text
title = item.attrib["pluginName"]
dupe_key = severity + title
if dupe_key in dupes:
find = dupes[dupe_key]
if plugin_output is not None:
find.description += plugin_output
else:
find = Finding(title=title,
test=test,
description=description,
severity=severity,
mitigation=mitigation,
impact=impact,
references=references,
cwe=cwe,
cve=cve)
find.unsaved_endpoints = list()
dupes[dupe_key] = find
                    if fqdn and '://' in fqdn:
endpoint = Endpoint.from_uri(fqdn)
else:
if protocol == 'general':
endpoint = Endpoint(host=fqdn if fqdn else ip)
else:
endpoint = Endpoint(protocol=protocol,
host=fqdn if fqdn else ip,
port=port)
find.unsaved_endpoints.append(endpoint)
return list(dupes.values())
def get_text_severity(self, severity_id):
"""Convert data of the report into severity"""
if severity_id == 4:
return 'Critical'
elif severity_id == 3:
return 'High'
elif severity_id == 2:
return 'Medium'
elif severity_id == 1:
return 'Low'
else:
return 'Info'
class NessusWASParser(object):
def get_scan_types(self):
return ["Nessus WAS Scan"]
def get_label_for_scan_types(self, scan_type):
return "Nessus WAS Scan"
def get_description_for_scan_types(self, scan_type):
return "Reports can be imported as CSV or .nessus (XML) report formats."
def get_findings(self, filename, test):
if filename.name.lower().endswith('.xml') or filename.name.lower().endswith('.nessus'):
return NessusWASXMLParser().get_findings(filename, test)
elif filename.name.lower().endswith('.csv'):
return NessusWASCSVParser().get_findings(filename, test)
else:
raise ValueError('Filename extension not recognized. Use .xml, .nessus or .csv')
|
hubspot/crm/extensions/accounting/models/invoice_pdf_response.py | Ronfer/hubspot-api-python | 117 | 136936 |
# coding: utf-8
"""
Accounting Extension
These APIs allow you to interact with HubSpot's Accounting Extension. It allows you to: * Specify the URLs that HubSpot will use when making webhook requests to your external accounting system. * Respond to webhook calls made to your external accounting system by HubSpot # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.crm.extensions.accounting.configuration import Configuration
class InvoicePdfResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {"result": "str", "invoice": "str"}
attribute_map = {"result": "@result", "invoice": "invoice"}
def __init__(self, result=None, invoice=None, local_vars_configuration=None): # noqa: E501
"""InvoicePdfResponse - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._result = None
self._invoice = None
self.discriminator = None
if result is not None:
self.result = result
self.invoice = invoice
@property
def result(self):
"""Gets the result of this InvoicePdfResponse. # noqa: E501
Designates if the response is a success ('OK') or failure ('ERR'). # noqa: E501
:return: The result of this InvoicePdfResponse. # noqa: E501
:rtype: str
"""
return self._result
@result.setter
def result(self, result):
"""Sets the result of this InvoicePdfResponse.
Designates if the response is a success ('OK') or failure ('ERR'). # noqa: E501
:param result: The result of this InvoicePdfResponse. # noqa: E501
:type: str
"""
allowed_values = ["OK", "ERR"] # noqa: E501
if self.local_vars_configuration.client_side_validation and result not in allowed_values: # noqa: E501
raise ValueError("Invalid value for `result` ({0}), must be one of {1}".format(result, allowed_values)) # noqa: E501
self._result = result
@property
def invoice(self):
"""Gets the invoice of this InvoicePdfResponse. # noqa: E501
The bytes of the invoice PDF. # noqa: E501
:return: The invoice of this InvoicePdfResponse. # noqa: E501
:rtype: str
"""
return self._invoice
@invoice.setter
def invoice(self, invoice):
"""Sets the invoice of this InvoicePdfResponse.
The bytes of the invoice PDF. # noqa: E501
:param invoice: The invoice of this InvoicePdfResponse. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and invoice is None: # noqa: E501
raise ValueError("Invalid value for `invoice`, must not be `None`") # noqa: E501
if (
self.local_vars_configuration.client_side_validation and invoice is not None and not re.search(r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", invoice)
): # noqa: E501
raise ValueError(r"Invalid value for `invoice`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`") # noqa: E501
self._invoice = invoice
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InvoicePdfResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, InvoicePdfResponse):
return True
return self.to_dict() != other.to_dict()
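# Illustrative usage sketch (not part of the generated module); the base64
# payload below is a stand-in value:
#
#   import base64
#   pdf_b64 = base64.b64encode(b"%PDF-1.4 ...").decode("ascii")
#   response = InvoicePdfResponse(result="OK", invoice=pdf_b64)
#   response.to_dict()   # {'result': 'OK', 'invoice': '...'}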
|
extraPackages/Pillow-6.0.0/Tests/test_image_getdata.py | dolboBobo/python3_ios | 130 | 136937 |
from .helper import PillowTestCase, hopper
class TestImageGetData(PillowTestCase):
def test_sanity(self):
data = hopper().getdata()
len(data)
list(data)
self.assertEqual(data[0], (20, 20, 70))
def test_roundtrip(self):
def getdata(mode):
im = hopper(mode).resize((32, 30))
data = im.getdata()
return data[0], len(data), len(list(data))
self.assertEqual(getdata("1"), (0, 960, 960))
self.assertEqual(getdata("L"), (16, 960, 960))
self.assertEqual(getdata("I"), (16, 960, 960))
self.assertEqual(getdata("F"), (16.0, 960, 960))
self.assertEqual(getdata("RGB"), ((11, 13, 52), 960, 960))
self.assertEqual(getdata("RGBA"), ((11, 13, 52, 255), 960, 960))
self.assertEqual(getdata("CMYK"), ((244, 242, 203, 0), 960, 960))
self.assertEqual(getdata("YCbCr"), ((16, 147, 123), 960, 960))
|
mvpa2/cmdline/cmd_plotmotionqc.py | nno/PyMVPA | 227 | 136962 |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Generate a QC plot for (BOLD fMRI) motion estimates of multiple segments.
The generated figure consists of two subplots: one for translation and one for
rotation. The L2-norm for each motion type is plotted. Segment boundaries
are indicated with dashed vertical lines. The following statistics are
visualized:
1. Range across subjects (min, max) with a light gray shaded area
2. 50% percentile around the mean with a medium gray shaded area
3. Standard error of the mean (SEM; +/-) with a dark gray shaded area
4. Median across subjects with a black line
5. Outlier subjects are represented as individual red lines
Outliers are defined as subjects that exceed the threshold anywhere within
a given segment. In that case the entire motion time series for that segment is
plotted as an outlier.
Example
pymvpa2 plotmotionqc \
-s sub*/func/*run-1_bold_mc.txt \
-s sub*/func/*run-2_bold_mc.txt \
--savefig motion.png
"""
# magic line for manpage summary
# man: -*- % BOLD fMRI motion QC plot
__docformat__ = 'restructuredtext'
import argparse
import numpy as np
parser_args = {
'formatter_class': argparse.RawDescriptionHelpFormatter,
}
def setup_parser(parser):
parser.add_argument(
'-s', '--segment', metavar='FILE', type=np.loadtxt, nargs='+',
action='append',
help="""two or more text files with motion estimate time series.
        This option can be given multiple times (with multiple time series
        each) to generate a multi-segment plot (e.g. one segment per run).""")
parser.add_argument(
'--estimate-order', metavar='LABEL', default='transrot',
choices=('transrot', 'rottrans'),
help="""column order of estimates in the files. `transrot` indicates
translation first, followed by rotation. `rottrans` refers to the
        opposite order. [Default: 'transrot']""")
parser.add_argument(
'--rad2deg', action='store_true',
help="""If specified, rotation estimates are assumed to be in radian
and will be converted to degrees.""")
parser.add_argument(
'--outlier-minthresh', type=float, default=None,
        help="""absolute minimum threshold for outlier detection. Only values
        larger than this threshold will ever be considered as
        outliers. [Default: None]""")
parser.add_argument(
'--outlier-stdthresh', type=float, default=None,
help="""minimum threshold in units of standard deviation
for outlier detection. [Default: None]""")
parser.add_argument(
'--savefig', metavar='FILENAME', nargs=1,
help="""file name to store the QC figure under. Without this option
the figure is shown in an interactive viewer.""")
return parser
def motionqc_plot(data, outlier_abs_minthresh=None, outlier_stdthresh=None, ylabel=None):
import pylab as pl
from mvpa2.misc.plot import timeseries_boxplot, concat_ts_boxplot_stats
from mvpa2.misc.stats import compute_ts_boxplot_stats
# segments x [subjects x timepoints x props]
segment_sizes = [d.shape[1] for d in data]
# get stats for all segments and concatenate them
stats = concat_ts_boxplot_stats(
[compute_ts_boxplot_stats(
d,
outlier_abs_minthresh=outlier_abs_minthresh,
outlier_thresh=outlier_stdthresh,
aggfx=np.linalg.norm,
greedy_outlier=True)
for d in data])
outlier = None
if outlier_stdthresh:
outlier = [list(np.where(np.sum(np.logical_not(o.mask), axis=0))[0])
for o in stats[1]]
# plot
timeseries_boxplot(
stats[0]['median'],
mean=stats[0]['mean'],
std=stats[0]['std'],
n=stats[0]['n'],
min=stats[0]['min'],
max=stats[0]['max'],
p25=stats[0]['p25'],
p75=stats[0]['p75'],
outlierd=stats[1],
segment_sizes=segment_sizes)
xp, xl = pl.xticks()
pl.xticks(xp, ['' for i in xl])
pl.xlim((0, len(stats[0]['n'])))
if ylabel:
pl.ylabel(ylabel)
pl.xlabel('time')
return outlier
def run(args):
import pylab as pl
from mvpa2.base import verbose
# segments x [subjects x timepoints x properties]
data = [np.array(s) for s in args.segment]
# put in standard property order: first translation, then rotation
if args.estimate_order == 'rottrans':
data = [d[:, :, (3, 4, 5, 0, 1, 2)] for d in data]
# convert rotations, now known to be last
if args.rad2deg:
for d in data:
v = d[:, :, 3:]
np.rad2deg(v, v)
# and plot
# figure setup
fig = pl.figure(figsize=(12, 5))
# translation
ax = pl.subplot(211)
outlier = motionqc_plot(
[d[..., :3] for d in data],
args.outlier_minthresh,
args.outlier_stdthresh,
"translation\nestimate L2-norm")
if outlier:
verbose(
0,
"Detected per-segment translation outlier input samples {0} (zero-based)".format(
outlier))
# rotation
ax = pl.subplot(212)
outlier = motionqc_plot(
[d[..., 3:] for d in data],
args.outlier_minthresh,
args.outlier_stdthresh,
"rotation\nestimate L2-norm")
if outlier:
verbose(
0,
"Detected per-segment rotation outlier input samples {0} (zero-based)".format(
outlier))
if args.savefig is None:
pl.show()
else:
pl.savefig(args.savefig[0])
|
app/src/s3_cleanup.py | troydieter/aws-auto-cleanup | 322 | 136973 |
import sys
import threading
import boto3
from src.helper import Helper
class S3Cleanup:
def __init__(self, logging, whitelist, settings, execution_log):
self.logging = logging
self.whitelist = whitelist
self.settings = settings
self.execution_log = execution_log
self.region = "global"
self._client_s3 = None
self._resource_s3 = None
self.is_dry_run = Helper.get_setting(self.settings, "general.dry_run", True)
@property
def client_s3(self):
if not self._client_s3:
self._client_s3 = boto3.client("s3")
return self._client_s3
@property
def resource_s3(self):
if not self._resource_s3:
self._resource_s3 = boto3.resource("s3")
return self._resource_s3
def run(self):
self.buckets()
def buckets(self):
"""
        Deletes Buckets. All Bucket Objects, Versions and Delete Markers
        are first deleted before the Bucket itself can be deleted.
"""
self.logging.debug("Started cleanup of S3 Buckets.")
is_cleaning_enabled = Helper.get_setting(
self.settings, "services.s3.bucket.clean", False
)
resource_maximum_age = Helper.get_setting(
self.settings, "services.s3.bucket.ttl", 7
)
resource_whitelist = Helper.get_whitelist(self.whitelist, "s3.bucket")
semaphore = threading.Semaphore(value=5)
if is_cleaning_enabled:
try:
resources = self.client_s3.list_buckets().get("Buckets")
            except Exception:
self.logging.error("Could not list all S3 Buckets.")
self.logging.error(sys.exc_info()[1])
return False
# threads list
threads = []
for resource in resources:
threads.append(
threading.Thread(
target=self.delete_bucket,
args=(
semaphore,
resource,
resource_whitelist,
resource_maximum_age,
),
)
)
# start all threads
for thread in threads:
thread.start()
# make sure that all threads have finished
for thread in threads:
thread.join()
self.logging.debug("Finished cleanup of S3 Buckets.")
return True
else:
self.logging.info("Skipping cleanup of S3 Buckets.")
return True
def delete_bucket(
self, semaphore, resource, resource_whitelist, resource_maximum_age
):
semaphore.acquire()
resource_id = resource.get("Name")
resource_date = resource.get("CreationDate")
resource_age = Helper.get_day_delta(resource_date).days
resource_action = None
if resource_id not in resource_whitelist:
if resource_age > resource_maximum_age:
# delete bucket policy
try:
if not self.is_dry_run:
self.client_s3.delete_bucket_policy(Bucket=resource_id)
                except Exception:
self.logging.error(
f"Could not delete Bucket Policy for S3 Bucket '{resource_id}'."
)
self.logging.error(sys.exc_info()[1])
resource_action = "ERROR"
else:
self.logging.debug(
f"Deleted Bucket Policy for S3 Bucket '{resource_id}'."
)
bucket_resource = self.resource_s3.Bucket(resource_id)
# delete all objects
try:
if not self.is_dry_run:
bucket_resource.objects.delete()
                except Exception:
self.logging.error(
f"Could not delete all Objects from S3 Bucket '{resource_id}'."
)
self.logging.error(sys.exc_info()[1])
resource_action = "ERROR"
else:
self.logging.debug(
f"Deleted all Objects from S3 Bucket '{resource_id}'."
)
# delete all Versions and DeleteMarkers
try:
if not self.is_dry_run:
bucket_resource.object_versions.delete()
                except Exception:
                    self.logging.error(
                        f"Could not delete all Versions and Delete Markers from S3 Bucket '{resource_id}'."
                    )
self.logging.error(sys.exc_info()[1])
resource_action = "ERROR"
else:
self.logging.debug(
f"Deleted all Versions and Delete Markers from S3 Bucket '{resource_id}'."
)
# delete bucket
try:
if not self.is_dry_run:
self.client_s3.delete_bucket(Bucket=resource_id)
except:
self.logging.error(
f"Could not delete S3 Bucket '{resource_id}'."
)
self.logging.error(sys.exc_info()[1])
resource_action = "ERROR"
else:
self.logging.info(
f"S3 Bucket '{resource_id}' was created {resource_age} days ago "
"and has been deleted."
)
resource_action = "DELETE"
else:
self.logging.debug(
f"S3 Bucket '{resource_id}' was created {resource_age} days ago "
"(less than TTL setting) and has not been deleted."
)
resource_action = "SKIP - TTL"
else:
self.logging.debug(
f"S3 Bucket '{resource_id}' has been whitelisted and has not been deleted."
)
resource_action = "SKIP - WHITELIST"
Helper.record_execution_log_action(
self.execution_log,
self.region,
"S3",
"Bucket",
resource_id,
resource_action,
)
semaphore.release()
return True
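# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): one way S3Cleanup
# might be driven. The settings dict mirrors the keys read above
# ("general.dry_run", "services.s3.bucket.clean", "services.s3.bucket.ttl");
# the whitelist and execution_log shapes are illustrative assumptions, since
# only Helper knows their real structure.
if __name__ == "__main__":
    import logging

    logging.basicConfig(level=logging.DEBUG)
    settings = {
        "general": {"dry_run": True},  # keep dry-run on for a safe trial pass
        "services": {"s3": {"bucket": {"clean": True, "ttl": 7}}},
    }
    whitelist = {}      # hypothetical: no whitelisted buckets
    execution_log = {}  # hypothetical: filled in by Helper.record_execution_log_action
    S3Cleanup(logging, whitelist, settings, execution_log).run()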
|
tests/test_distributed_splunk_image.py
|
ridvan70/docker-splunk
| 535 |
136996
|
#!/usr/bin/env python
# encoding: utf-8
import pytest
import time
import re
import os
import tarfile
import requests
import docker
import json
import urllib
import yaml
import subprocess
from shutil import copy, copytree, rmtree
from executor import Executor
from docker.types import Mount
# Code to suppress insecure https warnings
import urllib3
from urllib3.exceptions import InsecureRequestWarning, SubjectAltNameWarning
urllib3.disable_warnings(InsecureRequestWarning)
urllib3.disable_warnings(SubjectAltNameWarning)
global PLATFORM
PLATFORM = "debian-9"
OLD_SPLUNK_VERSION = "7.3.4"
def pytest_generate_tests(metafunc):
# This is called for every test. Only get/set command line arguments
# if the argument is specified in the list of test "fixturenames".
option_value = metafunc.config.option.platform
global PLATFORM
PLATFORM = option_value
class TestDockerSplunk(Executor):
@classmethod
def setup_class(cls):
super(TestDockerSplunk, cls).setup_class(PLATFORM)
def setup_method(self, method):
# Make sure all running containers are removed
self._clean_docker_env()
self.compose_file_name = None
self.project_name = None
self.DIR = None
def teardown_method(self, method):
if self.compose_file_name and self.project_name:
if self.DIR:
command = "docker-compose -p {} -f {} down --volumes --remove-orphans".format(self.project_name, os.path.join(self.DIR, self.compose_file_name))
else:
command = "docker-compose -p {} -f test_scenarios/{} down --volumes --remove-orphans".format(self.project_name, self.compose_file_name)
out, err, rc = self._run_command(command)
self._clean_docker_env()
if self.DIR:
try:
rmtree(self.DIR)
except OSError:
pass
self.compose_file_name, self.project_name, self.DIR = None, None, None
def test_compose_3idx1cm_custom_repl_factor(self):
self.project_name = self.generate_random_string()
# Generate default.yml
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Get the password
password = re.search(r"^ password: (.*?)\n", output, flags=re.MULTILINE|re.DOTALL).group(1).strip()
        assert password and password != "null"
# Change repl factor & search factor
output = re.sub(r' replication_factor: 3', r''' replication_factor: 2''', output)
output = re.sub(r' search_factor: 3', r''' search_factor: 1''', output)
# Write the default.yml to a file
with open(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)), "w") as f:
f.write(output)
# Standup deployment
try:
self.compose_file_name = "3idx1cm.yaml"
container_count, rc = self.compose_up(defaults_url="/tmp/defaults/{}.yml".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
# Get container logs
container_mapping = {"cm1": "cm", "idx1": "idx", "idx2": "idx", "idx3": "idx"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1", "idx2", "idx3"]
assert inventory_json["splunk_cluster_master"]["hosts"] == ["cm1"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed, and shcluster is setup properly
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 4
for container in containers:
container_name = container["Names"][0].strip("/").split("_")[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
# Check the replication factor & search factor
url = "https://localhost:{}/services/cluster/config/config?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["replication_factor"] == 2
assert json.loads(content)["entry"][0]["content"]["search_factor"] == 1
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)))
except OSError as e:
pass
def test_compose_1idx3sh1cm1dep(self):
self.project_name = self.generate_random_string()
# Generate default.yml -- for SHC, we need a common default.yml otherwise things won't work
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Get the password
password = re.search(r"^ password: (.*?)\n", output, flags=re.MULTILINE|re.DOTALL).group(1).strip()
assert password and password != "null"
# Write the default.yml to a file
with open(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)), "w") as f:
f.write(output)
# Tar the app before spinning up the scenario
with tarfile.open(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)), "w:gz") as tar:
tar.add(self.EXAMPLE_APP, arcname=os.path.basename(self.EXAMPLE_APP))
# Standup deployment
try:
self.compose_file_name = "1idx3sh1cm1dep.yaml"
container_count, rc = self.compose_up(defaults_url="/tmp/defaults/{}.yml".format(self.project_name), apps_url="http://appserver/{}.tgz".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
# Get container logs
container_mapping = {"sh1": "sh", "sh2": "sh", "sh3": "sh", "cm1": "cm", "idx1": "idx", "dep1": "dep"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1"]
assert inventory_json["splunk_search_head_captain"]["hosts"] == ["sh1"]
assert inventory_json["splunk_search_head"]["hosts"] == ["sh2", "sh3"]
assert inventory_json["splunk_cluster_master"]["hosts"] == ["cm1"]
assert inventory_json["splunk_deployer"]["hosts"] == ["dep1"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed, and shcluster is setup properly
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 7
for container in containers:
# Skip the nginx container
if "nginx" in container["Image"]:
continue
container_name = container["Names"][0].strip("/").split("_")[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name in {"sh1", "sh2", "sh3", "idx1"}:
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["version"] == "0.0.1"
# Make sure preferred captain is set
if container_name == "sh1":
url = "https://localhost:{}/servicesNS/nobody/system/configs/conf-server/shclustering?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert json.loads(content)["entry"][0]["content"]["preferred_captain"] == "1"
# Search results won't return the correct results immediately :(
time.sleep(30)
RETRIES = 10
IMPLICIT_WAIT = 6
for n in range(RETRIES):
try:
self.logger.info("Attempt #{}: checking internal search host count".format(n+1))
search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}_sh1_1".format(self.project_name), password=self.password)
assert len(search_providers) == 2
assert "idx1" in search_providers and "sh1" in search_providers
assert distinct_hosts == 6
break
except Exception as e:
self.logger.error("Attempt #{} error: {}".format(n+1, str(e)))
if n < RETRIES-1:
time.sleep(IMPLICIT_WAIT)
continue
raise e
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)))
os.remove(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)))
except OSError as e:
pass
def test_compose_1uf1so(self):
# Standup deployment
self.compose_file_name = "1uf1so.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"so1": "so", "uf1": "uf"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_standalone"]["hosts"] == ["so1"]
except KeyError as e:
self.logger.error(e)
raise e
# Search results won't return the correct results immediately :(
time.sleep(30)
search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}_so1_1".format(self.project_name), password=self.password)
assert len(search_providers) == 1
assert search_providers[0] == "so1"
assert distinct_hosts == 2
def test_compose_3idx1cm_default_repl_factor(self):
self.project_name = self.generate_random_string()
# Generate default.yml
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Get the password
password = re.search(r"^ password: (.*?)\n", output, flags=re.MULTILINE|re.DOTALL).group(1).strip()
        assert password and password != "null"
# Write the default.yml to a file
with open(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)), "w") as f:
f.write(output)
# Standup deployment
try:
self.compose_file_name = "3idx1cm.yaml"
container_count, rc = self.compose_up(defaults_url="/tmp/defaults/{}.yml".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
# Get container logs
container_mapping = {"cm1": "cm", "idx1": "idx", "idx2": "idx", "idx3": "idx"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1", "idx2", "idx3"]
assert inventory_json["splunk_cluster_master"]["hosts"] == ["cm1"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed, and shcluster is setup properly
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 4
for container in containers:
container_name = container["Names"][0].strip("/").split("_")[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
# Check the replication factor & search factor
url = "https://localhost:{}/services/cluster/config/config?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["replication_factor"] == 3
assert json.loads(content)["entry"][0]["content"]["search_factor"] == 3
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)))
except OSError as e:
pass
def test_compose_1so1cm_connected(self):
# Standup deployment
self.compose_file_name = "1so1cm_connected.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"so1": "so", "cm1": "cm"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Check connections
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
container_name = container["Names"][0].strip("/").split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/searchheads?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
assert len(output["entry"]) == 2
for sh in output["entry"]:
assert sh["content"]["label"] in ["cm1", "so1"]
assert sh["content"]["status"] == "Connected"
def test_compose_1so1cm_unconnected(self):
# Standup deployment
self.compose_file_name = "1so1cm_unconnected.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"so1": "so", "cm1": "cm"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Check connections
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
container_name = container["Names"][0].strip("/").split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/searchheads?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
assert len(output["entry"]) == 1
assert output["entry"][0]["content"]["label"] == "cm1"
assert output["entry"][0]["content"]["status"] == "Connected"
def test_adhoc_1cm_idxc_pass4symmkey(self):
# Create the container
cid = None
try:
self.project_name = self.generate_random_string()
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, ports=[8089], name=self.project_name,
environment={
"DEBUG": "true",
"SPLUNK_START_ARGS": "--accept-license",
"SPLUNK_PASSWORD": <PASSWORD>,
"SPLUNK_ROLE": "splunk_cluster_master",
"SPLUNK_INDEXER_URL": "idx1",
"SPLUNK_IDXC_PASS4SYMMKEY": "<PASSWORD>",
"SPLUNK_IDXC_LABEL": "keepsummersafe",
},
host_config=self.client.create_host_config(port_bindings={8089: ("0.0.0.0",)})
)
cid = cid.get("Id")
self.client.start(cid)
# Poll for the container to be ready
assert self.wait_for_containers(1, name=self.project_name)
# Check splunkd
splunkd_port = self.client.port(cid, 8089)[0]["HostPort"]
url = "https://localhost:{}/services/server/info".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
# Check if the cluster label and pass4SymmKey line up
exec_command = self.client.exec_create(cid, "cat /opt/splunk/etc/system/local/server.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "cluster_label = keepsummersafe" in std_out
pass4SymmKey = re.search(r'\[clustering\].*?pass4SymmKey = (.*?)\n', std_out, flags=re.MULTILINE|re.DOTALL).group(1).strip()
exec_command = self.client.exec_create(cid, "/opt/splunk/bin/splunk show-decrypted --value '{}'".format(pass4SymmKey), user="splunk")
std_out = self.client.exec_start(exec_command)
assert "keepsummerbeingliketotallystokedaboutlikethegeneralvibeandstuff" in std_out
except Exception as e:
self.logger.error(e)
raise e
finally:
if cid:
self.client.remove_container(cid, v=True, force=True)
def test_compose_1cm_smartstore(self):
# Generate default.yml
self.project_name = self.generate_random_string()
self.DIR = os.path.join(self.FIXTURES_DIR, self.project_name)
os.mkdir(self.DIR)
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Get the password
password = re.search(r"^ password: (.*?)\n", output, flags=re.MULTILINE|re.DOTALL).group(1).strip()
        assert password and password != "null"
# Add a custom conf file
output = re.sub(r' smartstore: null', r''' smartstore:
index:
- indexName: default
remoteName: remote_vol
scheme: s3
remoteLocation: smartstore-test
s3:
access_key: abcd
secret_key: 1234
endpoint: https://s3-region.amazonaws.com''', output)
# Write the default.yml to a file
with open(os.path.join(self.DIR, "default.yml"), "w") as f:
f.write(output)
# Create the container and mount the default.yml
cid = None
try:
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, ports=[8089],
volumes=["/tmp/defaults/default.yml"], name=self.project_name,
environment={
"DEBUG": "true",
"SPLUNK_START_ARGS": "--accept-license",
"SPLUNK_PASSWORD": <PASSWORD>,
"SPLUNK_ROLE": "splunk_cluster_master",
"SPLUNK_INDEXER_URL": "idx1"
},
host_config=self.client.create_host_config(binds=[self.DIR + "/default.yml:/tmp/defaults/default.yml"],
port_bindings={8089: ("0.0.0.0",)})
)
cid = cid.get("Id")
self.client.start(cid)
# Poll for the container to be ready
assert self.wait_for_containers(1, name=self.project_name)
# Check splunkd
splunkd_port = self.client.port(cid, 8089)[0]["HostPort"]
url = "https://localhost:{}/services/server/info".format(splunkd_port)
kwargs = {"auth": ("admin", <PASSWORD>), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
# Check if the created file exists
exec_command = self.client.exec_create(cid, "cat /opt/splunk/etc/master-apps/_cluster/local/indexes.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert 'remotePath = volume:remote_vol/$_index_name' in std_out
assert 'repFactor = auto' in std_out
assert '[volume:remote_vol]' in std_out
assert 'storageType = remote' in std_out
assert 'path = s3://smartstore-test' in std_out
assert 'remote.s3.access_key = abcd' in std_out
assert 'remote.s3.secret_key = 1234' in std_out
assert 'remote.s3.endpoint = https://s3-region.amazonaws.com' in std_out
except Exception as e:
self.logger.error(e)
raise e
finally:
if cid:
self.client.remove_container(cid, v=True, force=True)
try:
os.remove(os.path.join(self.DIR, "default.yml"))
except OSError:
pass
def test_compose_1sh1cm(self):
# Standup deployment
self.compose_file_name = "1sh1cm.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"sh1": "sh", "cm1": "cm"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Check connections
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
container_name = container["Names"][0].strip("/").split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/searchheads?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
# There's only 1 "standalone" search head connected and 1 cluster master
assert len(output["entry"]) == 2
for sh in output["entry"]:
assert sh["content"]["label"] == "sh1" or sh["content"]["label"] == "cm1"
assert sh["content"]["status"] == "Connected"
def test_compose_1sh1cm1dmc(self):
# Standup deployment
self.compose_file_name = "1sh1cm1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 2, 0, 2, 1, 3)
def test_compose_1sh2idx2hf1dmc(self):
# Standup deployment
self.compose_file_name = "1sh2idx2hf1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 3, 2, 2, 0, 4)
def test_compose_3idx1cm1dmc(self):
# Standup deployment
self.compose_file_name = "3idx1cm1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=900)
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 4, 3, 2, 1, 5)
def test_compose_1uf1so1dmc(self):
# Standup deployment
self.compose_file_name = "1uf1so1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 1, 1, 1, 0, 2)
def test_compose_1so1dmc(self):
# Standup deployment
self.compose_file_name = "1so1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 1, 1, 1, 0, 2)
def test_compose_2idx2sh1dmc(self):
# Standup deployment
self.compose_file_name = "2idx2sh1dmc.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
self.check_dmc(containers, 4, 2, 3, 0, 5)
def test_compose_2idx2sh(self):
# Standup deployment
self.compose_file_name = "2idx2sh.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"sh1": "sh", "sh2": "sh", "idx1": "idx", "idx2": "idx"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1", "idx2"]
assert inventory_json["splunk_search_head"]["hosts"] == ["sh1", "sh2"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Check connections
idx_list = ["idx1", "idx2"]
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
c_name = container["Labels"]["com.docker.compose.service"]
if "sh1" in c_name or "sh2" in c_name:
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
url = "https://localhost:{}/services/search/distributed/peers?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
output = json.loads(content)
peers = [x["content"]["peerName"] for x in output["entry"]]
assert len(peers) == 2 and set(peers) == set(idx_list)
# Search results won't return the correct results immediately :(
time.sleep(30)
search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}_sh1_1".format(self.project_name), password=self.password)
assert len(search_providers) == 3
assert "idx1" in search_providers and "idx2" in search_providers and "sh1" in search_providers
assert distinct_hosts == 4
def test_compose_2idx2sh1cm(self):
# Standup deployment
self.compose_file_name = "2idx2sh1cm.yaml"
self.project_name = self.generate_random_string()
container_count, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
# Get container logs
container_mapping = {"sh1": "sh", "sh2": "sh", "idx1": "idx", "idx2": "idx", "cm1": "cm"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_cluster_master"]["hosts"] == ["cm1"]
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1", "idx2"]
assert inventory_json["splunk_search_head"]["hosts"] == ["sh1", "sh2"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Check connections
idx_list = ["idx1", "idx2"]
sh_list = ["sh1", "sh2", "cm1"]
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
for container in containers:
container_name = container["Names"][0].strip("/").split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "cm1":
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/searchheads?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
for sh in output["entry"]:
if sh["content"]["label"] in sh_list and sh["content"]["status"] == "Connected":
sh_list.remove(sh["content"]["label"])
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/peers?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
for idx in output["entry"]:
if idx["content"]["label"] in idx_list and idx["content"]["status"] == "Up":
idx_list.remove(idx["content"]["label"])
assert len(idx_list) == 0 and len(sh_list) == 0
# Add one more indexer
self.compose_file_name = "2idx2sh1cm_idx3.yaml"
_, rc = self.compose_up()
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count+1, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
retries = 10
for n in range(retries):
try:
status, content = self.handle_request_retry("GET", "https://localhost:{}/services/cluster/master/peers?output_mode=json".format(splunkd_port),
{"auth": ("admin", self.password), "verify": False})
assert status == 200
output = json.loads(content)
assert len(output["entry"]) == 3
indexers = []
for idx in output["entry"]:
indexers.append(idx["content"]["label"])
assert "idx1" in indexers
assert "idx2" in indexers
assert "idx3" in indexers
break
except Exception as err:
time.sleep(10)
if n < retries-1:
continue
raise err
def test_compose_1deployment1cm(self):
self.project_name = self.generate_random_string()
# Tar the app before spinning up the scenario
with tarfile.open(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)), "w:gz") as tar:
tar.add(self.EXAMPLE_APP, arcname=os.path.basename(self.EXAMPLE_APP))
# Generate default.yml
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Add a custom conf file
output = re.sub(r' group: splunk', r''' group: splunk
conf:
- key: user-prefs
value:
directory: /opt/splunk/etc/users/admin/user-prefs/local
content:
general:
default_namespace: appboilerplate
search_syntax_highlighting: dark
search_assistant:
"serverClass:secrets:app:test": {}''', output)
# Write the default.yml to a file
with open(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)), "w") as f:
f.write(output)
# Standup deployment
try:
self.compose_file_name = "1deployment1cm.yaml"
container_count, rc = self.compose_up(defaults_url="/tmp/defaults/{}.yml".format(self.project_name), apps_url="http://appserver/{}.tgz".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"cm1": "cm", "depserver1": "deployment_server"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed and certain subdirectories are excluded
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 3
for container in containers:
# Skip the nginx container
if "nginx" in container["Image"]:
continue
container_name = container["Names"][0].strip("/").split("_")[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "depserver1":
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
resp = requests.get(url, auth=("admin", self.password), verify=False)
# Deployment server should *not* install the app
assert resp.status_code == 404
# Check that the app exists in etc/apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "state = disabled" in std_out
# Check that the app exists in etc/deployment-apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/deployment-apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/deployment-apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file " == std_out
if container_name == "cm1":
# Check if the created file exists
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/users/admin/user-prefs/local/user-prefs.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "[serverClass:secrets:app:test]" in std_out
assert "[general]" in std_out
assert "default_namespace = appboilerplate" in std_out
assert "search_syntax_highlighting = dark" in std_out
assert "search_assistant" in std_out
RETRIES = 5
for i in range(RETRIES):
try:
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["version"] == "0.0.1"
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file" in std_out
assert "state = enabled" in std_out
except Exception as e:
self.logger.error(e)
if i < RETRIES-1:
time.sleep(30)
continue
raise e
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.SCENARIOS_DIR, "defaults", "{}.yml".format(self.project_name)))
os.remove(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)))
except OSError as e:
pass
def test_compose_1deployment1so(self):
self.project_name = self.generate_random_string()
# Tar the app before spinning up the scenario
with tarfile.open(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)), "w:gz") as tar:
tar.add(self.EXAMPLE_APP, arcname=os.path.basename(self.EXAMPLE_APP))
# Standup deployment
try:
self.compose_file_name = "1deployment1so.yaml"
container_count, rc = self.compose_up(apps_url="http://appserver/{}.tgz".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"{}_so1_1".format(self.project_name): "so", "{}_depserver1_1".format(self.project_name): "deployment_server"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs(container)
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json(container)
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed and certain subdirectories are excluded
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 3
for container in containers:
# Skip the nginx container
if "nginx" in container["Image"]:
continue
container_name = container["Names"][0].strip("/").split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "depserver1":
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
resp = requests.get(url, auth=("admin", self.password), verify=False)
# Deployment server should *not* install the app
assert resp.status_code == 404
# Check that the app exists in etc/apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "state = disabled" in std_out
# Check that the app exists in etc/deployment-apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/deployment-apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/deployment-apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file " == std_out
if container_name == "so1":
RETRIES = 5
for i in range(RETRIES):
try:
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["version"] == "0.0.1"
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file" in std_out
assert "state = enabled" in std_out
except Exception as e:
self.logger.error(e)
if i < RETRIES-1:
time.sleep(30)
continue
raise e
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)))
except OSError:
pass
def test_compose_1deployment1uf(self):
self.project_name = self.generate_random_string()
# Tar the app before spinning up the scenario
with tarfile.open(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)), "w:gz") as tar:
tar.add(self.EXAMPLE_APP, arcname=os.path.basename(self.EXAMPLE_APP))
# Standup deployment
try:
self.compose_file_name = "1deployment1uf.yaml"
container_count, rc = self.compose_up(apps_url="http://appserver/{}.tgz".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name))
# Get container logs
container_mapping = {"{}_uf1_1".format(self.project_name): "uf", "{}_depserver1_1".format(self.project_name): "deployment_server"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs(container)
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json(container)
self.check_common_keys(inventory_json, container_mapping[container])
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed, and shcluster is setup properly
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 3
for container in containers:
# Skip the nginx container
if "nginx" in container["Image"]:
continue
container_name = container["Names"][0].strip("/")
container_name = container_name.split('_')[1]
splunkd_port = self.client.port(container["Id"], 8089)[0]["HostPort"]
if container_name == "depserver1":
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
resp = requests.get(url, auth=("admin", self.password), verify=False)
# Deployment server should *not* install the app
assert resp.status_code == 404
# Check that the app exists in etc/apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "state = disabled" in std_out
# Check that the app exists in etc/deployment-apps
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunk/etc/deployment-apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunk/etc/deployment-apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file " == std_out
if container_name == "uf1":
RETRIES = 5
for i in range(RETRIES):
try:
# Check the app and version
url = "https://localhost:{}/servicesNS/nobody/splunk_app_example/configs/conf-app/launcher?output_mode=json".format(splunkd_port)
kwargs = {"auth": ("admin", self.password), "verify": False}
status, content = self.handle_request_retry("GET", url, kwargs)
assert status == 200
assert json.loads(content)["entry"][0]["content"]["version"] == "0.0.1"
exec_command = self.client.exec_create(container["Id"], "ls /opt/splunkforwarder/etc/apps/splunk_app_example/local/", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "savedsearches.conf" in std_out
assert "app.conf" in std_out
exec_command = self.client.exec_create(container["Id"], "cat /opt/splunkforwarder/etc/apps/splunk_app_example/local/app.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "# Autogenerated file" in std_out
assert "state = enabled" in std_out
except Exception as e:
self.logger.error(e)
if i < RETRIES-1:
time.sleep(30)
continue
raise e
except Exception as e:
self.logger.error(e)
raise e
finally:
try:
os.remove(os.path.join(self.FIXTURES_DIR, "{}.tgz".format(self.project_name)))
except OSError:
pass
def test_compose_3idx1cm_splunktcp_ssl(self):
self.project_name = self.generate_random_string()
# Generate default.yml
cid = self.client.create_container(self.SPLUNK_IMAGE_NAME, tty=True, command="create-defaults")
self.client.start(cid.get("Id"))
output = self.get_container_logs(cid.get("Id"))
self.client.remove_container(cid.get("Id"), v=True, force=True)
# Get the password
password = re.search(r"^ password: (.*?)\n", output, flags=re.MULTILINE|re.DOTALL).group(1).strip()
assert password and password != "null"
# Commands to generate self-signed certificates for Splunk here: https://docs.splunk.com/Documentation/Splunk/latest/Security/ConfigureSplunkforwardingtousesignedcertificates
passphrase = "<PASSWORD>"
cmds = [
"openssl genrsa -aes256 -passout pass:{pw} -out {path}/ca.key 2048".format(pw=passphrase, path=self.DEFAULTS_DIR),
"openssl req -new -key {path}/ca.key -passin pass:{pw} -out {path}/ca.csr -subj /CN=localhost".format(pw=passphrase, path=self.DEFAULTS_DIR),
"openssl x509 -req -in {path}/ca.csr -sha512 -passin pass:{pw} -signkey {path}/ca.key -CAcreateserial -out {path}/ca.pem -days 3".format(pw=passphrase, path=self.DEFAULTS_DIR),
"openssl genrsa -aes256 -passout pass:{pw} -out {path}/server.key 2048".format(pw=passphrase, path=self.DEFAULTS_DIR),
"openssl req -new -passin pass:{pw} -key {path}/server.key -out {path}/server.csr -subj /CN=localhost".format(pw=passphrase, path=self.DEFAULTS_DIR),
"openssl x509 -req -passin <PASSWORD>:{pw} -in {path}/server.csr -SHA256 -CA {path}/ca.pem -CAkey {path}/ca.key -CAcreateserial -out {path}/server.pem -days 3".format(pw=passphrase, path=self.DEFAULTS_DIR),
"cat {path}/server.pem {path}/server.key {path}/ca.pem > {path}/cert.pem".format(path=self.DEFAULTS_DIR)
]
for cmd in cmds:
execute_cmd = subprocess.check_output(["/bin/sh", "-c", cmd])
# Update s2s ssl settings
output = re.sub(r''' s2s:.*?ssl: false''', r''' s2s:
ca: /tmp/defaults/ca.pem
cert: /tmp/defaults/cert.pem
enable: true
password: {}
port: 9997
ssl: true'''.format(passphrase), output, flags=re.DOTALL)
# Write the default.yml to a file
with open(os.path.join(self.DEFAULTS_DIR, "{}.yml".format(self.project_name)), "w") as f:
f.write(output)
# Standup deployment
try:
self.compose_file_name = "3idx1cm.yaml"
container_count, rc = self.compose_up(defaults_url="/tmp/defaults/{}.yml".format(self.project_name))
assert rc == 0
# Wait for containers to come up
assert self.wait_for_containers(container_count, label="com.docker.compose.project={}".format(self.project_name), timeout=600)
# Get container logs
container_mapping = {"cm1": "cm", "idx1": "idx", "idx2": "idx", "idx3": "idx"}
for container in container_mapping:
# Check ansible version & configs
ansible_logs = self.get_container_logs("{}_{}_1".format(self.project_name, container))
self.check_ansible(ansible_logs)
# Check values in log output
inventory_json = self.extract_json("{}_{}_1".format(self.project_name, container))
self.check_common_keys(inventory_json, container_mapping[container])
try:
assert inventory_json["splunk_indexer"]["hosts"] == ["idx1", "idx2", "idx3"]
assert inventory_json["splunk_cluster_master"]["hosts"] == ["cm1"]
except KeyError as e:
self.logger.error(e)
raise e
# Check Splunkd on all the containers
assert self.check_splunkd("admin", self.password)
# Make sure apps are installed, and shcluster is setup properly
containers = self.client.containers(filters={"label": "com.docker.compose.project={}".format(self.project_name)})
assert len(containers) == 4
for container in containers:
container_name = container["Names"][0].strip("/").split("_")[1]
cid = container["Id"]
exec_command = self.client.exec_create(cid, "cat /opt/splunk/etc/system/local/inputs.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "[splunktcp-ssl:9997]" in std_out
assert "disabled = 0" in std_out
assert "[SSL]" in std_out
assert "serverCert = /tmp/defaults/cert.pem" in std_out
assert "[sslConfig]" not in std_out
assert "rootCA = /tmp/defaults/ca.pem" in std_out
if container_name == "cm1":
exec_command = self.client.exec_create(cid, "cat /opt/splunk/etc/system/local/outputs.conf", user="splunk")
std_out = self.client.exec_start(exec_command)
assert "clientCert = /tmp/defaults/cert.pem" in std_out
assert "sslPassword" in std_out
assert "useClientSSLCompression = true" in std_out
# Check that data is being forwarded properly
time.sleep(30)
            search_providers, distinct_hosts = self.search_internal_distinct_hosts("{}_cm1_1".format(self.project_name), password=self.password)
assert len(search_providers) == 4
assert "idx1" in search_providers
assert "idx2" in search_providers
assert "idx3" in search_providers
assert distinct_hosts == 4
except Exception as e:
self.logger.error(e)
raise e
finally:
files = [
os.path.join(self.DEFAULTS_DIR, "ca.key"),
os.path.join(self.DEFAULTS_DIR, "ca.csr"),
os.path.join(self.DEFAULTS_DIR, "ca.pem"),
os.path.join(self.DEFAULTS_DIR, "server.key"),
os.path.join(self.DEFAULTS_DIR, "server.csr"),
os.path.join(self.DEFAULTS_DIR, "server.pem"),
os.path.join(self.DEFAULTS_DIR, "cert.pem"),
os.path.join(self.DEFAULTS_DIR, "{}.yml".format(self.project_name))
]
self.cleanup_files(files)
|
Chapter05/5_8_verify_email.py
|
shamir456/Python-Network-Programming-Cookbook-Second-Edition
| 125 |
136997
|
#!/usr/bin/env python
# Python Network Programming Cookbook, Second Edition -- Chapter - 5
# This program is optimized for Python 2.7.12 and Python 3.5.2.
# It may run on any other version with/without modifications.
import re
import smtplib
import dns.resolver
import argparse
def mail_checker(fromAddress, toAddress):
    regex = r'^[a-z0-9][a-z0-9._%+-]{0,63}@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,})$'
addressToVerify = str(toAddress)
match = re.match(regex, addressToVerify)
if match == None:
print('Bad Syntax in the address to verify. Re-enter the correct value')
raise ValueError('Bad Syntax')
splitAddress = addressToVerify.split('@')
domain = str(splitAddress[1])
records = dns.resolver.query(domain, 'MX')
mxRecord = records[0].exchange
mxRecord = str(mxRecord)
server = smtplib.SMTP()
server.set_debuglevel(1)
try:
server.connect(mxRecord)
except Exception as e:
print ("Mail Check Failed Due to Error: %s" %str(e))
return
server.helo(server.local_hostname)
server.mail(fromAddress)
code, message = server.rcpt(str(addressToVerify))
server.quit()
if code == 250:
        print('Successfully verified the email: %s' % addressToVerify)
    else:
        print('Failed to verify the email: %s' % addressToVerify)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Mail Server Example')
parser.add_argument('--fromAddress', action="store", dest="fromAddress", type=str, required=True)
parser.add_argument('--toAddress', action="store", dest="toAddress", type=str, required=True)
given_args = parser.parse_args()
mail_checker(given_args.fromAddress, given_args.toAddress)
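# Hedged usage sketch: calling mail_checker directly (the addresses below are
# purely illustrative, not real mailboxes).
#
#     mail_checker('sender@example.com', 'recipient@example.com')
#
# Equivalent CLI form:
#     python 5_8_verify_email.py --fromAddress sender@example.com --toAddress recipient@example.com
#
# The script resolves the recipient domain's MX record and issues an SMTP
# RCPT TO command; a 250 reply is treated as a successful verification.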
|
Tests/testing/core/helpers.py
|
wsqjny/Falcor
| 1,615 |
136999
|
'''
Module with various helpers functions.
'''
import os
import re
import subprocess
import time
import socket
from urllib.parse import urlparse
class GitError(Exception):
pass
def get_git_head_branch(path):
'''
Return the git HEAD branch name by reading from .git/HEAD file.
'''
try:
head = open(os.path.join(path, '.git/HEAD')).read()
# HEAD either contains a reference to refs/heads or a sha1
return re.search(r'(ref: refs\/heads\/)?(.*)$', head).group(2)
except (IOError, OSError, AttributeError) as e:
raise GitError(e)
def get_git_remote_origin(path, remote='origin'):
'''
Return the git remote origin by reading from .git/config file.
'''
try:
config = open(os.path.join(path, '.git/config')).read()
return re.search(r'^\[remote \"%s\"\].*\n.*url = (.*)$' % (remote), config, flags=re.MULTILINE).group(1)
except (IOError, OSError, AttributeError) as e:
raise GitError(e)
def get_hostname():
'''
Return the hostname.
'''
return socket.gethostname()
def get_vcs_root(path):
'''
Return the git version control system root (gitlab-master or github).
'''
url = get_git_remote_origin(path)
url = urlparse(url)
url = url.netloc.split('.')
for u in url:
if u.startswith("git@"): u = u.replace("git@", "")
if u == "gitlab-master" or u == "github": return u
print("Error. Unknown VCS root `" + url[0] + "`")
return url[0].lower()
def mirror_folders(src_dir, dst_dir):
'''
Mirror contents from src_dir to dst_dir.
'''
if os.name != 'nt':
raise RuntimeError('mirror_folders() is not implemented for this OS')
args = ["Robocopy.exe", str(src_dir), str(dst_dir), "/MIR", "/FFT", "/Z", "/XA:H", "/W:5", "/np"]
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = process.communicate()[0]
return process.returncode <= 7, output.decode('ascii')
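# Hedged usage sketch (paths are illustrative): mirroring local results to a
# network share on Windows, where mirror_folders shells out to Robocopy and
# treats exit codes <= 7 as success.
#
#     ok, log = mirror_folders(r"C:\results\local", r"\\server\share\results")
#     if not ok:
#         print(log)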
|
senseact_mod/senseact/devices/dxl/dxl_driver_v1.py
|
homayoonfarrahi/cycle-time-study
| 188 |
137053
|
# Copyright (c) 2018, The SenseAct Authors.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
"""
Requires DynamixelSDK repo written by Robotis- https://github.com/ROBOTIS-GIT/DynamixelSDK.
In particular, we use the commit version - "d6420db120daf9d777a2cb83e9f2ba27687504e7"
This script is modeled after the dxl_commv1 script for ease of substitution and usage. It is not elegant or optimal,
but it is needed for backward compatibility. It does not contain every function available in dxl_commv1,
but it does contain the key function calls required by dxl_communicator.py.
"""
import sys
import glob
import ctypes
import senseact.lib.DynamixelSDK.python.dynamixel_functions_py.dynamixel_functions as dynamixel
from sys import platform
PROTOCOL_VERSION = 1
COMM_SUCCESS = 0
is_ctypes_driver = True
def make_connection(baudrate, timeout, port_str='None'):
""" Establishes serial connection with a dxl device.
If port_str is 'None', the function searches for a serial port address and
connects to the first device found.
Args:
baudrate: an integer representing a baudrate to connect at
timeout: a float representing connection timeout parameter in seconds
port_str: A string containing the serial port address (e.g., /dev/ttyACM0 or /dev/ttyUSB0 on linux)
Returns:
An instance of dynamixel.portHandler defined in C
"""
if (port_str is None) or (port_str == 'None'):
check_ports = ["/dev/ttyUSB*", "/dev/ttyACM*"]
if platform == 'darwin':
check_ports = ["/dev/tty.usb*"] + check_ports
elif type(port_str) == str:
check_ports = [port_str]
else:
raise TypeError("port_str should be string")
found_port = None
for potential_port in check_ports:
found_ports = glob.glob(potential_port)
if len(found_ports) > 0:
found_port = found_ports[0]
break
if found_port is None:
raise IOError("Could not find specified port: {}".format(port_str))
print("Attempting to open device {}".format(found_port))
# Initialize PortHandler Structs
# Set the port path
# Get methods and members of PortHandlerLinux
port_num = dynamixel.portHandler(found_port.encode('utf-8'))
# Initialize PacketHandler Structs
dynamixel.packetHandler()
# Open port
if dynamixel.openPort(port_num):
print("Succeeded to open the port!")
else:
raise IOError("Failed to open the port!")
# Set port baudrate
if dynamixel.setBaudRate(port_num, baudrate):
print("Baudrate set to: {}".format(baudrate))
else:
raise IOError("Failed to change the baudrate!")
# Set port timeout
timeout = int(timeout * 1000) # Convert to milli-seconds
if dynamixel.setPacketTimeoutMSec(port_num, timeout):
print("Timeout set to: {}!".format(timeout))
else:
raise IOError("Failed to change the timeout!")
return port_num
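# Illustrative call (an assumption, not from the original driver): open the first
# detected USB/ACM serial device at 1 Mbps with a 20 ms packet timeout.
#   port = make_connection(baudrate=1000000, timeout=0.02, port_str='None')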
def read_a_block(port, idn, read_block, read_wait_time):
""" Reads a block of sensor values from dxl device.
Args:
port: Dynamixel portHandler object
idn: An integer representing the DXL ID number
read_block: An instance of Contiguous Registers (defined in dxl_reg) containing the block of registers to read
read_wait_time: A float representing time (in seconds) to wait before reading the buffer
Returns:
A dictionary containing register names and their corresponding values
"""
vals = read_a_block_vals(port, idn, read_block, read_wait_time)
return {reg.name: val for reg, val in zip(read_block, vals)}
def read_a_block_vals(port, idn, read_block, read_wait_time=0.00001):
""" Reads a block of sensor values from dxl device.
Args:
port: Dynamixel portHandler object returned by make_connection
idn: An integer representing the DXL ID number
read_block: An instance of Contiguous Registers (defined in dxl_reg) containing the block of registers to read
read_wait_time: A float representing time (in seconds) to wait before reading the buffer
Returns:
A list containing sensor values of each register in the read_block
"""
dynamixel.readTxRx(port, PROTOCOL_VERSION, idn, read_block.offset, read_block.width)
dxl_comm_result = dynamixel.getLastTxRxResult(port, PROTOCOL_VERSION)
dxl_error = dynamixel.getLastRxPacketError(port, PROTOCOL_VERSION)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION, dxl_error))
data_pos = 0
vals = []
for reg in read_block._regs:
data = dynamixel.getDataRead(port, PROTOCOL_VERSION, reg.width, data_pos)
data_pos += reg.width
vals.append(data)
return read_block.vals_from_data(vals)
def read2bytes(port, idn, address):
""" Read 2 bytes from the control table of a DXL with id = idn, starting at the specified address
Args:
port: Dynamixel portHandler object returned by make_connection
idn: An integer representing the DXL ID number
address: An integer representing the register address in the DXL control table
Returns:
An int or float value read from the specified address of a DXL with id = idn
"""
return dynamixel.read2ByteTxRx(port, PROTOCOL_VERSION, idn, address)
def read1byte(port, idn, address):
""" Read 1 byte from the control table of a DXL with id = idn, starting at the specified address
Args:
port: Dynamixel portHandler object returned by make_connection
idn: An integer representing the DXL ID number
address: An integer representing the register address in the DXL control table
Returns:
An int or float value read from the specified address of a DXL with id = idn
"""
return dynamixel.read1ByteTxRx(port, PROTOCOL_VERSION, idn, address)
def packet_write_buffer(idn, block, buf):
""" Returns a write packet from a payload buffer.
NOTE: The namesake function in dxl_commv1 serves a specific purpose. However, this is just a filler here.
We use this function to fit with the old code. Helps with backward compatibility.
Args:
idn: An integer representing the DXL ID number
block: A block of contiguous registers
buf: A list containing values to be written to the control table
Returns:
A tuple - (address of register, list of values to be written, width of the register in bytes)
"""
reg0 = block.offset
width = block.width
return (reg0, buf, width)
def packet_read(idn, reg0, width):
""" Create an instruction packet to read data from the DXL control table.
    NOTE: The namesake function in dxl_commv1 serves a specific purpose. However, this is just a filler here.
We use this function to fit with the old code. Helps with backward compatibility.
Args:
idn: An integer representing the DXL ID number
reg0: An integer representing the register index in the control table
        width: An integer representing the number of bytes to read from the control table starting at reg0
Returns:
A tuple - (ID of DXL device, address of register, width of the register in bytes)
"""
return (idn, reg0, width)
def write1byte(port, idn, address, data):
""" Write 1 byte to the DXL control table
Args:
port: Dynamixel portHandler object returned by make_connection
idn: An integer representing the DXL ID number
address: An integer representing the register address in the DXL control table
data: An integer. Data to be written to the register
"""
dynamixel.write1ByteTxRx(port, PROTOCOL_VERSION, idn, address, data)
def write2bytes(port, idn, address, data):
""" Write 2 bytes to the DXL control table
Args:
port: Dynamixel portHandler object returned by make_connection
idn: An integer representing the DXL ID number
address: An integer representing the register address in the DXL control table
data: An integer. Data to be written to the register
"""
dynamixel.write2ByteTxRx(port, PROTOCOL_VERSION, idn, address, data)
def loop_until_written(port, dxl_id, packet, read_wait_time=0.00001):
""" Loop until instruction packet is written in the DXL control table
Args:
port: Dynamixel portHandler object returned by make_connection
        dxl_id: An integer representing the DXL ID number
packet: A tuple - (address of register, list of values to be written, width of the register in bytes)
read_wait_time: A float representing time (in seconds) to wait before reading the buffer
"""
reg0, buf, width = packet
if width == 1:
write1byte(port, dxl_id, reg0, buf[0])
elif width == 2:
write2bytes(port, dxl_id, reg0, buf[0])
dxl_comm_result = dynamixel.getLastTxRxResult(port, PROTOCOL_VERSION)
dxl_error = dynamixel.getLastRxPacketError(port, PROTOCOL_VERSION)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION, dxl_error))
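# Hedged example (an assumption; register blocks come from dxl_reg elsewhere in the
# code base): write a goal position of 2048 to servo ID 1 using the packet helpers above.
#   goal_pos_block = ...                                  # hypothetical 2-byte ContiguousRegisters block
#   packet = packet_write_buffer(1, goal_pos_block, [2048])
#   loop_until_written(port, 1, packet)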
def sync_write(port, block, data):
""" Write to multiple DXLs in synchronized fashion
This instruction is used to control multiple Dynamixels simultaneously with a single Instruction Packet
transmission. When this instruction is used, several instructions can be transmitted at once, so that the
communication time is reduced when multiple Dynamixels are connected in a single channel. However, the SYNC WRITE
instruction can only be used to a single address with an identical length of data over connected Dynamixels.
ID should be transmitted as Broadcasting ID.
Args:
port: Dynamixel portHandler object returned by make_connection
block: An instance of Contiguous Registers (defined in dxl_reg) containing the register to write to
data: A zip of 2 lists - dxl_ids and values.
"""
address = block.offset
length = block.width
group_num = dynamixel.groupSyncWrite(port, PROTOCOL_VERSION, address, length)
for ind, (dxl_id, value) in enumerate(data):
dxl_addparam_result = ctypes.c_ubyte(dynamixel.groupSyncWriteAddParam(group_num, dxl_id, value, length)).value
if dxl_addparam_result != 1:
print(dxl_addparam_result)
print("[ID:%03d] groupSyncWrite addparam failed" % (dxl_id))
# Syncwrite goal position
dynamixel.groupSyncWriteTxPacket(group_num)
dxl_comm_result = dynamixel.getLastTxRxResult(port, PROTOCOL_VERSION)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION, dxl_comm_result))
# Clear syncwrite parameter storage
dynamixel.groupSyncWriteClearParam(group_num)
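# Hedged example (an assumption): command two servos with one SYNC WRITE. `data` is a zip
# of DXL ids and values, matching the signature above; `goal_pos_block` is a hypothetical
# single-register block from dxl_reg.
#   sync_write(port, goal_pos_block, zip([1, 2], [1024, 3072]))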
def init_bulk_read(port):
""" Initialize groupBulkRead Structs
Args:
port: Dynamixel portHandler object returned by make_connection
Returns:
An instance of dynamixel.groupBulkRead defined in C
"""
group_num = dynamixel.groupBulkRead(port, PROTOCOL_VERSION)
return group_num
def bulk_read(port, blocks, dxl_ids, group_num=None):
""" Read from multiple DXL MX-64s sending one bulk read packet
This instruction is used for reading values of multiple MX series DXLs simultaneously by sending a single
Instruction Packet. The packet length is shortened compared to sending multiple READ commands, and the idle time
between the status packets being returned is also shortened to save communication time. However, this cannot be
used to read a single module. If an identical ID is designated multiple times, only the first designated
parameter will be processed.
Args:
port: Dynamixel portHandler object returned by make_connection
blocks: A list containing blocks of contiguous registers
dxl_ids: A list containing DXL id numbers
group_num: An instance of dynamixel.groupBulkRead defined in C
Returns:
A dictionary containing the motor id, the register names and their corresponding values.
For e.g., if present position and goal position are read from 2 motors, the output would be:
{'1': {'present_pos': 2.34, 'goal_pos': 3.21}, '2': {'present_pos': 1.23, 'goal_pos': 2.55}}
"""
# Initialize Group bulk read instance
    if group_num is None:
        group_num = init_bulk_read(port)
    if not isinstance(blocks, list):
        blocks = [blocks]
if not isinstance(dxl_ids, list):
dxl_ids = [dxl_ids]
assert len(blocks) == len(dxl_ids)
# Add parameter storage for Dynamixel#1 present position value
for i, (id, block) in enumerate(zip(dxl_ids, blocks)):
address = block.offset
length = block.width
dxl_addparam_result = ctypes.c_ubyte(
dynamixel.groupBulkReadAddParam(group_num, id, address, length)).value
if dxl_addparam_result != 1:
print("[ID:%03d] groupBulkRead addparam failed" % (id))
# Bulkread specified address
dynamixel.groupBulkReadTxRxPacket(group_num)
dxl_comm_result = dynamixel.getLastTxRxResult(port, PROTOCOL_VERSION)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION, dxl_comm_result))
# Read the values and convert them
vals_dict = {}
for i, (id, block) in enumerate(zip(dxl_ids, blocks)):
address = block.offset
length = block.width
# Check if groupbulkread data of Dynamixel#1 is available
dxl_getdata_result = ctypes.c_ubyte(
dynamixel.groupBulkReadIsAvailable(group_num, id, address, length)).value
if dxl_getdata_result != 1:
print("[ID:%03d] groupBulkRead getdata failed" % (id))
raw_val = dynamixel.groupBulkReadGetData(group_num, id, address, length)
data = block.vals_from_data([raw_val])
vals_dict[i] = data
return vals_dict
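# Hedged example (an assumption; the register block is hypothetical): bulk-read the same
# block from two servos. The result is keyed by position in the id list.
#   vals = bulk_read(port, [present_pos_block, present_pos_block], [1, 2])
#   # -> {0: [...values for id 1...], 1: [...values for id 2...]}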
def clear_port(port):
""" Clears device port. """
dynamixel.clearPort(port)
def close(port):
""" Closes device port. """
dynamixel.closePort(port)
|
cv2/display-button/main-for-old_cv2.py
|
whitmans-max/python-examples
| 140 |
137069
|
#!/usr/bin/env python
from __future__ import print_function
import cv2
class Button(object):
def __init__(self, text, x, y, width, height, command=None):
self.text = text
self.x = x
self.y = y
self.width = width
self.height = height
self.left = x
self.top = y
self.right = x + width - 1
self.bottom = y + height - 1
self.hover = False
self.clicked = False
self.command = command
def handle_event(self, event, x, y, flags, param):
self.hover = (self.left <= x <= self.right and \
self.top <= y <= self.bottom)
        if self.hover and flags == 1:
            self.clicked = True  # remember that the button area was clicked
print(event, x, y, flags, param)
if self.command:
self.command()
def draw(self, frame):
if not self.hover:
cv2.putText(frame, "???", (40,40), FONT, 3 , (0,0,255), 2)
cv2.circle(frame, (20,20), 10 , (0,0,255), -1)
else:
cv2.putText(frame, "REC", (40,40), FONT, 3 , (0,255,0), 2)
cv2.circle(frame, (20,20), 10 , (0,255,0), -1)
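# Hedged example (not in the original script): the `command` callback can be used to
# end the main loop when the button area is clicked.
#   def quit_program():
#       global running
#       running = False
#   button = Button('QUIT', 0, 0, 100, 30, command=quit_program)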
# ---------------------------------------------------------------------
# keys
KEY_ESC = 27
# font
FONT = cv2.FONT_HERSHEY_PLAIN
# ---------------------------------------------------------------------
# states
running = True
# ---------------------------------------------------------------------
# create button instance
button = Button('QUIT', 0, 0, 100, 30)
# ---------------------------------------------------------------------
# create VideoCapture
vcap = cv2.VideoCapture(0) # 0=camera
# check if video capturing has been initialized already
if not vcap.isOpened():
print("ERROR INITIALIZING VIDEO CAPTURE")
exit()
else:
print("OK INITIALIZING VIDEO CAPTURE")
# get vcap property
width = int(vcap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
height = int(vcap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
#fps = float(vcap.get(cv2.cv.CV_CAP_PROP_FPS))
fps = 15.0 # use different value to get slowmotion or fastmotion effect
print('VCAP width :', width)
print('VCAP height:', height)
print('VCAP fps :', fps)
while running:
# grab, decode and return the next video frame (and "return" status)
ret, frame = vcap.read()
if not ret:
running = False
else:
# add REC to frame
#cv2.putText(frame, "REC", (40,40), FONT, 3 , (0,0,255), 2)
#cv2.circle(frame, (20,20), 10 , (0,0,255), -1)
# add instruction to frame
cv2.putText(frame,"ESC - QUIT",(width - 200,20), FONT, 1 ,(255,255,255))
# add button to frame
button.draw(frame)
# displays frame
cv2.imshow('x', frame)
# assign mouse click to method in button instance
cv2.setMouseCallback("x", button.handle_event)
# get key (get only lower 8-bits to work with chars)
key = cv2.waitKey(1) & 0xFF
if key == KEY_ESC:
print("EXIT")
running = False
# release everything
vcap.release()
cv2.destroyAllWindows()
|
challenge_1/python/carter010/reverse.py
|
rchicoli/2017-challenges
| 271 |
137073
|
<gh_stars>100-1000
# python3 --3.5
message = "Hello"
print(message[::-1])
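# Equivalent reversal without slicing (a minimal illustrative alternative):
#   print("".join(reversed(message)))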
|
tests/test_envvars.py
|
vikas0212git/iotedgedev
| 111 |
137084
|
import os
from unittest import mock
import pytest
from iotedgedev.envvars import EnvVars
from iotedgedev.output import Output
pytestmark = pytest.mark.unit
def test_get_envvar__valid():
envvars = EnvVars(Output())
deployment_template = envvars.get_envvar("DEPLOYMENT_CONFIG_TEMPLATE_FILE")
assert deployment_template is not None
def test_get_envvar__invalid():
envvars = EnvVars(Output())
testerval = envvars.get_envvar("TESTER")
assert not testerval
def test_load_valid():
envvars = EnvVars(Output())
envvars.load()
assert envvars.DEPLOYMENT_CONFIG_TEMPLATE_FILE == "deployment.template.json"
def test_verify_envvar_has_val__valid():
envvars = EnvVars(Output())
envvars.load()
result = envvars.verify_envvar_has_val("DEPLOYMENT_CONFIG_TEMPLATE_FILE", envvars.DEPLOYMENT_CONFIG_TEMPLATE_FILE)
assert not result
def test_get_envvar_key_if_val__valid():
envvars = EnvVars(Output())
assert envvars.get_envvar_key_if_val("DEPLOYMENT_CONFIG_TEMPLATE_FILE")
def test_get_envvar_key_if_val__invalid():
envvars = EnvVars(Output())
assert not envvars.get_envvar_key_if_val("TESTER")
def test_set_envvar():
envvars = EnvVars(Output())
registry_server = envvars.get_envvar("DEPLOYMENT_CONFIG_TEMPLATE_FILE")
envvars.set_envvar("DEPLOYMENT_CONFIG_TEMPLATE_FILE", "deployment.template_new.json")
new_registry_server = envvars.get_envvar("DEPLOYMENT_CONFIG_TEMPLATE_FILE")
assert new_registry_server == "deployment.template_new.json"
envvars.set_envvar("DEPLOYMENT_CONFIG_TEMPLATE_FILE", registry_server)
def test_envvar_clean():
EnvVars(Output())
envvar_clean_name = u"IOTEDGEDEV_ENVVAR_CLEAN_TEST"
os.environ[envvar_clean_name] = u"test unicode string"
@pytest.mark.parametrize(
"command, command_list",
[
("solution new test_solution", ["init", "e2e", "solution new", "new", "simulator stop"]),
("solution new", ["init", "e2e", "solution new", "new", "simulator stop"]),
("", ["init", "e2e", "", "new", "simulator stop"]),
]
)
def test_in_command_list_true(command, command_list):
envvars = EnvVars(Output())
assert envvars.in_command_list(command, command_list)
@pytest.mark.parametrize(
"command, command_list",
[
("solution add filtermodule", ["init", "e2e", "solution new", "new", "simulator stop"]),
("solution addotherstuff filtermodule", ["init", "e2e", "solution add", "new", "simulator stop"]),
("", ["init", "e2e", "solution new", "new", "simulator stop"]),
("solution new test_solution", ["init", "e2e", "", "new", "simulator stop"])
]
)
def test_in_command_list_false(command, command_list):
envvars = EnvVars(Output())
assert not envvars.in_command_list(command, command_list)
@pytest.mark.parametrize(
"command",
[
"iothub setup --update-dotenv",
""
]
)
def test_is_terse_command_true(command):
envvars = EnvVars(Output())
assert envvars.is_terse_command(command)
def test_is_terse_command_false():
envvars = EnvVars(Output())
assert not envvars.is_terse_command("solution add")
def test_default_container_registry_server_key_exists():
envvars = EnvVars(Output())
envvars.load()
assert "CONTAINER_REGISTRY_SERVER" in os.environ
@pytest.mark.parametrize(
"envvar",
[
"CONTAINER_REGISTRY_SERVER",
"CONTAINER_REGISTRY_USERNAME",
"CONTAINER_REGISTRY_PASSWORD"
]
)
def test_default_envvar_value_exists(envvar):
envvars = EnvVars(Output())
server = envvars.get_envvar(envvar)
assert server is not None
def test_container_registry_server_key_missing_sys_exit():
envvars = EnvVars(Output())
with pytest.raises(ValueError):
envvars.get_envvar("CONTAINER_REGISTRY_SERVER_UNITTEST", required=True)
@pytest.mark.parametrize(
"envvar",
[
"CONTAINER_REGISTRY_SERVER",
"CONTAINER_REGISTRY_USERNAME",
"CONTAINER_REGISTRY_PASSWORD"
]
)
def test_unique_envvar_tokens(envvar):
unique = set()
    envvar_length = len(envvar)
is_unique = True
envvars = EnvVars(Output())
envvars.load()
for key in os.environ:
if key.startswith(envvar):
            token = key[envvar_length:]
if token not in unique:
unique.add(token)
else:
is_unique = False
assert is_unique
@mock.patch.dict(os.environ, {
"CONTAINER_REGISTRY_SERVER_UNITTEST": "unittest.azurecr.io",
"CONTAINER_REGISTRY_USERNAME_UNITTEST": "username",
"CONTAINER_REGISTRY_PASSWORD_UNITTEST": "password"
})
def test_additional_container_registry_map_is_set_from_environ():
envvars = EnvVars(Output())
envvars.load()
assert len(envvars.CONTAINER_REGISTRY_MAP) == 2
assert 'UNITTEST' in envvars.CONTAINER_REGISTRY_MAP.keys()
assert envvars.CONTAINER_REGISTRY_MAP['UNITTEST'].server == 'unittest.azurecr.io'
assert envvars.CONTAINER_REGISTRY_MAP['UNITTEST'].username == 'username'
assert envvars.CONTAINER_REGISTRY_MAP['UNITTEST'].password == 'password'
|
Hackerrank/Apple and Orange/Apple and Orange.py
|
arushmangal/Hack-CP-DSA
| 205 |
137085
|
<filename>Hackerrank/Apple and Orange/Apple and Orange.py
#!/bin/python3
import math
import os
import random
import re
import sys
# The function accepts following parameters:
# 1. INTEGER s = start point
# 2. INTEGER t = End point
# 3. INTEGER a = location of apple tree
# 4. INTEGER b = location of orange tree
# 5. INTEGER_ARRAY apples
# 6. INTEGER_ARRAY oranges
def bwt(s,t,a,arr):
array = []
btw = 0
for i in arr:
        array.append(a+i) # convert each relative drop distance to an absolute position from the tree at a
for i in array:
        if s <= i <= t:
            btw += 1 # count the fruit whose absolute position falls within [s, t]
return btw
def countApplesAndOranges(s, t, a, b, apples, oranges):
    print(bwt(s,t,a,apples)) # print the number of apples that land on the house [s, t]
    print(bwt(s,t,b,oranges)) # print the number of oranges that land on the house [s, t]
if __name__ == '__main__':
first_multiple_input = input().rstrip().split()
s = int(first_multiple_input[0])
t = int(first_multiple_input[1])
second_multiple_input = input().rstrip().split()
a = int(second_multiple_input[0])
b = int(second_multiple_input[1])
third_multiple_input = input().rstrip().split()
m = int(third_multiple_input[0])
n = int(third_multiple_input[1])
apples = list(map(int, input().rstrip().split()))
oranges = list(map(int, input().rstrip().split()))
countApplesAndOranges(s, t, a, b, apples, oranges)
|
apps/profile/management/commands/reimport_stripe_history.py
|
Paul3MK/NewsBlur
| 3,073 |
137146
|
<filename>apps/profile/management/commands/reimport_stripe_history.py
import stripe, datetime, time
from django.conf import settings
from django.core.management.base import BaseCommand
from utils import log as logging
from apps.profile.models import Profile
class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument("-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back")
        parser.add_argument("-l", "--limit", dest="limit", nargs=1, type=int, default=100, help="Charges per batch")
        parser.add_argument("-s", "--start", dest="start", nargs=1, type=str, default=None, help="Offset customer_id (starting_after)")
def handle(self, *args, **options):
limit = options.get('limit')
days = int(options.get('days'))
starting_after = options.get('start')
Profile.reimport_stripe_history(limit, days, starting_after)
|
ambari-common/src/main/python/ambari_commons/unicode_tolerant_fs.py
|
likenamehaojie/Apache-Ambari-ZH
| 1,664 |
137157
|
<reponame>likenamehaojie/Apache-Ambari-ZH<gh_stars>1000+
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
def get_encoded_string(data):
try:
return data.encode("utf8")
except UnicodeDecodeError:
return data
def unicode_walk(top, topdown=True, onerror=None, followlinks=False):
"""
    Unicode-tolerant version of os.walk. Can (and must) be used in environments with broken locales (and other
    encoding-related problems) to traverse directory trees that contain unicode names in files and directories. All
    other functions seem to accept utf-8 encoded strings, so the result of `unicode_walk` can be used without problems.
"""
import os.path
import os
islink, join, isdir = os.path.islink, os.path.join, os.path.isdir
top = get_encoded_string(top)
try:
# Note that listdir and error are globals in this module due
# to earlier import-*.
names = os.listdir(top)
    except os.error as err:
if onerror is not None:
onerror(err)
return
dirs, nondirs = [], []
for name in names:
name = get_encoded_string(name)
if isdir(join(top, name)):
dirs.append(name)
else:
nondirs.append(name)
if topdown:
yield top, dirs, nondirs
for name in dirs:
name = get_encoded_string(name)
new_path = join(top, name)
if followlinks or not islink(new_path):
for x in unicode_walk(new_path, topdown, onerror, followlinks):
yield x
if not topdown:
yield top, dirs, nondirs
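# Illustrative use (an assumption; the path is hypothetical): walk a tree that may
# contain non-UTF-8 names and count the regular files found.
#   file_count = 0
#   for top, dirs, files in unicode_walk("/var/lib/ambari-agent"):
#       file_count += len(files)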
|
haven/haven_results/images_fig.py
|
mariatippler/haven-ai
| 145 |
137191
|
<filename>haven/haven_results/images_fig.py
import copy
import glob
import os
import sys
import pprint
from itertools import groupby
from textwrap import wrap
import numpy as np
import pandas as pd
import pylab as plt
import tqdm
from . import plots_line
from .. import haven_utils as hu
try:
from IPython.display import Image
from IPython.display import display
except Exception:
pass
def get_images(
exp_list, savedir_base, n_exps=20, n_images=1, figsize=(12, 12), legend_list=None, dirname="images", verbose=True
):
"""[summary]
Parameters
----------
exp_list : list
A list of experiments, each defines a single set of hyper-parameters
savedir_base : str
A directory where experiments are saved
n_exps : int, optional
[description], by default 3
n_images : int, optional
[description], by default 1
height : int, optional
[description], by default 12
width : int, optional
[description], by default 12
legend_list : [type], optional
[description], by default None
dirname : str, optional
[description], by default 'images'
Returns
-------
fig_list : list
a list of pylab figures
Example
-------
>>> from haven import haven_results as hr
>>> savedir_base='../results/isps/'
>>> exp_list = hr.get_exp_list(savedir_base=savedir_base,
>>> filterby_list=[{'sampler':{'train':'basic'}}])
>>> hr.get_images(exp_list, savedir_base=savedir_base)
"""
fig_list = []
exp_count = 0
for k, exp_dict in enumerate(exp_list):
if exp_count >= n_exps:
if verbose:
print("displayed %d/%d experiment images" % (k, n_exps))
break
result_dict = {}
exp_id = hu.hash_dict(exp_dict)
result_dict["exp_id"] = exp_id
if verbose:
print("Displaying Images for Exp:", exp_id)
savedir = os.path.join(savedir_base, exp_id)
base_dir = os.path.join(savedir, dirname)
img_list = glob.glob(os.path.join(base_dir, "*.jpg"))
img_list += glob.glob(os.path.join(base_dir, "*.png"))
img_list += glob.glob(os.path.join(base_dir, "*.gif"))
img_list.sort(key=os.path.getmtime)
img_list = img_list[::-1]
img_list = img_list[:n_images]
if len(img_list) == 0:
if verbose:
print("no images in %s" % base_dir)
continue
ncols = len(img_list)
# ncols = len(exp_configs)
# from IPython.display import display
# display('%s' % ("="*50))
result_dict = {"exp_id": exp_id}
result_dict.update(copy.deepcopy(exp_dict))
score_list_path = os.path.join(savedir, "score_list.pkl")
if os.path.exists(score_list_path):
score_list = hu.load_pkl(score_list_path)
result_dict.update(score_list[-1])
# display(pd.Series(result_dict))
if legend_list is not None:
label = plots_line.get_label(legend_list, exp_dict, show_key=True)
else:
label = exp_id
if "epoch" in result_dict:
label += "_epoch:%d" % result_dict["epoch"]
# if legend_list is None:
# label = hu.hash_dict(exp_dict)
# else:
# label = '-'.join(['%s:%s' % (k, str(result_dict.get(k))) for
# k in legend_list])
for i in range(ncols):
img_fname = os.path.split(img_list[i])[-1]
title = f"{exp_id} - {img_fname}\n{label}"
fig = plt.figure(figsize=figsize)
if ".gif" in img_list[i]:
display(exp_id)
display(exp_dict)
display(title)
display(Image(img_list[i]))
else:
img = plt.imread(img_list[i])
plt.imshow(img)
# display(exp_id)
# display(exp_dict)
plt.title(title, fontsize=20)
plt.axis("off")
plt.tight_layout()
fig_list += [fig]
exp_count += 1
return fig_list
|
helpers/labml_helpers/datasets/csv.py
|
elgalu/labml
| 463 |
137210
|
import pandas as pd
import torch
from typing import Callable, List
from torch.utils.data import TensorDataset
class CsvDataset(TensorDataset):
    data: pd.DataFrame
y_cols: List
x_cols: List
transform: Callable
test_fraction: float = 0.0
train: bool
def __init__(self, file_path: str, y_cols: List, x_cols: List, train: bool = True,
transform: Callable = lambda: None, test_fraction: float = 0.0, nrows: int = None):
self.__dict__.update(**vars())
self.data = pd.read_csv(**{'filepath_or_buffer': file_path, 'nrows': nrows})
data_length = len(self.data)
self.test_size = int(data_length * self.test_fraction)
self.train_size = data_length - self.test_size
self.train_data = self.data.iloc[0:self.train_size]
self.test_data = self.data.iloc[self.train_size:]
if train:
x, y = torch.tensor(self.train_data[self.x_cols].values), torch.tensor(self.train_data[self.y_cols].values)
else:
x, y = torch.tensor(self.test_data[self.x_cols].values), torch.tensor(self.test_data[self.y_cols].values)
super(CsvDataset, self).__init__(x, y)
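# Hedged usage sketch (an assumption; 'data.csv' and the column names are hypothetical,
# and the columns are assumed numeric): build an 80/20 train/test split.
#   train_ds = CsvDataset('data.csv', y_cols=['label'], x_cols=['f1', 'f2'],
#                         train=True, test_fraction=0.2)
#   x0, y0 = train_ds[0]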
|
spytest/apis/routing/evpn.py
|
shubav/sonic-mgmt
| 132 |
137212
|
<reponame>shubav/sonic-mgmt
import re
from spytest.utils import filter_and_select
from spytest import st, utils
import apis.system.port as port1
from apis.system.rest import get_rest,delete_rest,config_rest
from utilities.utils import get_interface_number_from_name
def config_bgp_evpn(dut, **kwargs):
"""
Author: <NAME> (<EMAIL>)
config_bgp_evpn(dut=data.dut1,neighbor ='172.16.31.10',remote_as='20',config='yes',config_type_list =["activate"])
config_bgp_evpn(dut=dut1,config = 'yes',config_type_list=["advertise_all_vni"],local_as="10")
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="8:8",config="yes",local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_both_rt="50:50",config="no", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_import_rt="51:50",config="yes", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_export_rt="52:50",config="yes", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="8:8",config="no", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="9:9",l3_both_rt="50:50",config="no",local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=data.dut1,neighbor ='172.16.31.10',remote_as='20',config='yes',config_type_list =["activate"], cli_type='klish')
config_bgp_evpn(dut=dut1,config = 'yes',config_type_list=["advertise_all_vni"],local_as="10", cli_type='klish')
Configure bgp l2vpn evpn specific commands
:param dut:
:param neighbor:
:param local_as:
:param config_type_list:
:param allowas_in:
:param attribute_unchanged:
:param route_map:
:param direction:
:param network:
:param rd:
:param vni:
:param vrf_name:
:param l3_vni_id:
:param ethtag:
:param bgp_label:
:param esi_id:
:param gw_ip:
:param router_mac:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if cli_type == 'click': cli_type = "vtysh"
skip_rest_cfg_type_list = [ 'nexthop_self', 'route_map', 'allowas_in', 'network', 'route_target', 'autort',
'attribute_unchanged', 'default_originate_ipv4', 'default_originate_ipv6',
'default_originate_ipv4_vrf', 'default_originate_ipv6_vrf',
'dup_addr_detection', 'flooding_disable', 'flooding_head_end_replication',
"route_server_client", "route_reflector_client" ]
if 'config' in kwargs:
config = kwargs['config']
else:
config = 'yes'
if 'vrf_name' in kwargs:
vrf_name = kwargs['vrf_name']
else:
vrf_name = "default"
if 'l3_vni_id' in kwargs:
l3_vni_id = kwargs['l3_vni_id']
if 'vtep_name' in kwargs:
vtep_name = kwargs['vtep_name']
if 'config_type_list' in kwargs:
config_type_list = kwargs['config_type_list']
if 'neighbor' in kwargs:
neighbor = kwargs['neighbor']
if 'peergroup' in kwargs and 'neighbor' not in kwargs:
neighbor = kwargs['peergroup']
if 'addr_family' in kwargs:
addr_family = kwargs['addr_family']
else:
addr_family = 'l2vpn'
if 'addr_family_modifier' in kwargs:
addr_family_modifier = kwargs['addr_family_modifier']
else:
addr_family_modifier = "evpn"
st.log('Configure BGP L2VPN address family')
addr_family_str = addr_family.upper() + '_' + addr_family_modifier.upper()
if cli_type in ['rest-put','rest-patch']:
st.banner("CFG list: {}, cli_type:{}".format(config_type_list,cli_type))
for cfg_type in config_type_list:
if cfg_type in skip_rest_cfg_type_list:
cli_type = 'klish'
st.banner("CFG type skipped: {}, cli_type:{}".format(cfg_type, cli_type))
break
if cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
if config.lower() == 'yes' and 'vrf_vni' not in config_type_list:
if 'local_as' in kwargs:
### AS URI
url = rest_urls['bgp_as_config'].format(vrf_name)
payload = {'openconfig-network-instance:as': int(kwargs['local_as'])}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP local-as config Failed')
return False
### L2VPN global URI
url = rest_urls['bgp_l2vpn_global_config'].format(vrf_name)
payload = { 'openconfig-network-instance:afi-safis': {
'afi-safi': [
{'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
}
}
]
}}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family global config Failed'.format(addr_family_str))
return False
else:
if 'local_as' in kwargs:
my_cmd = 'router bgp {}\n'.format(kwargs['local_as'])
else:
my_cmd = 'router bgp\n'
my_cmd += 'address-family {} {}\n'.format(addr_family,addr_family_modifier)
if 'allowas_in' in kwargs:
allowas_in = kwargs['allowas_in']
if 'attribute_unchanged' in kwargs:
attribute_unchanged = kwargs['attribute_unchanged']
if 'route_map' in kwargs:
route_map = kwargs['route_map']
if 'direction' in kwargs:
direction = kwargs['direction']
else:
direction = 'in'
if 'advertise_ipv4' in kwargs:
advertise_ipv4 = kwargs['advertise_ipv4']
if 'advertise_ipv6' in kwargs:
advertise_ipv6 = kwargs['advertise_ipv6']
if 'advertise_ipv4_vrf' in kwargs:
advertise_ipv4 = kwargs['advertise_ipv4_vrf']
if 'advertise_ipv6_vrf' in kwargs:
advertise_ipv6 = kwargs['advertise_ipv6_vrf']
if 'dup_addr_detection' in kwargs:
dup_addr_detection = kwargs['dup_addr_detection']
if 'network' in kwargs:
network = kwargs['network']
rd = kwargs['rd']
ethtag = kwargs['ethtag']
bgp_label = kwargs['bgp_label']
esi_id = kwargs['esi_id']
gw_ip = kwargs['gw_ip']
router_mac = kwargs['router_mac']
if config.lower() == 'yes':
config_cmd = ''
elif config.lower() == 'remove_vrf':
config_cmd = 'remove_vrf'
elif config.lower() == 'remove_vni':
config_cmd = 'remove_vni'
else:
config_cmd = 'no'
if 'vni_unconfig' not in kwargs:
vni_unconfig = ''
elif kwargs['vni_unconfig'] == "yes":
vni_unconfig = 'no'
for type1 in config_type_list:
cur_type = type1
if type1 == 'vrf_vni' and config_cmd == '':
if cli_type in ['klish','rest-put','rest-patch']:
map_vrf_vni(dut, vrf_name, l3_vni_id, config='yes', vtep_name=vtep_name, cli_type=cli_type)
my_cmd = ''
else:
my_cmd = ''
my_cmd += 'vrf {} \n'.format(vrf_name)
my_cmd += 'vni {} \n'.format(l3_vni_id)
elif type1 == 'vrf_vni' and config_cmd != '':
my_cmd = ''
if cli_type in ['klish','rest-put','rest-patch']:
if config_cmd == 'remove_vrf' or config_cmd == 'remove_vni' or config_cmd == 'no':
map_vrf_vni(dut, vrf_name, l3_vni_id, config='no', vtep_name=vtep_name, cli_type=cli_type)
my_cmd = ''
else:
if config_cmd == 'remove_vrf':
my_cmd += 'no vrf {} \n'.format(vrf_name)
if config_cmd == 'remove_vni' or config_cmd == 'no':
my_cmd += 'vrf {} \n'.format(vrf_name)
my_cmd += 'no vni {} \n'.format(l3_vni_id)
elif type1 == 'activate':
if cli_type == 'klish':
neigh_name = get_interface_number_from_name(neighbor)
if isinstance(neigh_name, dict):
my_cmd += "neighbor interface {} {}\n".format(neigh_name["type"],neigh_name["number"])
else:
my_cmd += "neighbor {}\n".format(neigh_name)
my_cmd += "remote-as {}\n".format(kwargs['remote_as'])
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} activate\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
elif cli_type in ['click','vtysh']:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} neighbor {} activate\n'.format(config_cmd, neighbor)
elif cli_type in ['rest-put','rest-patch']:
if config.lower() == 'yes':
st.log("BGP EVPN neigh config")
url = rest_urls['bgp_neighbor_config'].format(vrf_name)
if kwargs['remote_as'] == 'external':
payload = {'openconfig-network-instance:neighbors':
{'neighbor': [
{'neighbor-address': neighbor,
'config': {
'neighbor-address': neighbor,
'peer-type': kwargs['remote_as'].upper(),
'enabled': bool(1)
}
}
]}
}
else:
payload = {'openconfig-network-instance:neighbors':
{'neighbor': [
{'neighbor-address': neighbor,
'config': {
'neighbor-address': neighbor,
'peer-as': int(kwargs['remote_as']),
'enabled': bool(1)
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP EVPN neighbor configuration Failed')
return False
url = rest_urls['bgp_l2vpn_neighbor_config'].format(vrf_name,neighbor)
payload = {'openconfig-network-instance:afi-safis': {
'afi-safi':[
{
'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
'enabled': True
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family configuration Failed'.format(addr_family_str))
return False
else:
url = rest_urls['bgp_l2vpn_neighbor_config'].format(vrf_name, neighbor)
payload = {'openconfig-network-instance:afi-safis': {
'afi-safi':[
{
'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
'enabled': False
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family no activate Failed'.format(addr_family_str))
return False
elif type1 == 'allowas_in':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} allowas-in\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
else:
my_cmd += '{} neighbor {} allowas-in {}\n'.format(config_cmd,neighbor,allowas_in)
elif type1 == 'attribute_unchanged':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} attribute-unchanged\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} attribute-unchanged {}\n'.format(config_cmd,neighbor,attribute_unchanged)
elif type1 == 'nexthop_self':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} next-hop-self\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} next-hop-self\n'.format(config_cmd, neighbor)
elif type1 == 'route_map':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-map {} {}\n".format(config_cmd,route_map,direction)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-map {} {}\n'.format(config_cmd,neighbor,route_map,direction)
my_cmd += 'exit\n'
elif type1 == 'route_reflector_client':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-reflector-client\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-reflector-client\n'.format(config_cmd, neighbor)
elif type1 == 'route_server_client':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-server-client\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-server-client\n'.format(config_cmd, neighbor)
elif type1 == 'disable_ebgp_connected_route_check':
if cli_type == 'klish':
my_cmd += '{} disable-ebgp-connected-route-check \n'.format(config_cmd)
my_cmd += "exit\n"
elif cli_type in ['click','vtysh']:
my_cmd += '{} bgp disable-ebgp-connected-route-check \n'.format(config_cmd)
elif cli_type in ['rest-put','rest-patch']:
url = rest_urls['ebgp_connected_route_check'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-ext:disable-ebgp-connected-route-check': True}
elif config.lower() == 'no':
payload = {'openconfig-bgp-ext:disable-ebgp-connected-route-check': False}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: disable-ebgp-connected-route-check configuration:{} Failed'.format(config_cmd))
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
elif type1 == 'advertise_ipv4':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV4_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise ipv4 {}\n'.format(config_cmd,advertise_ipv4)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv6':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV6_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise ipv6 {}\n'.format(config_cmd,advertise_ipv6)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv4_vrf':
# convert to REST as and when used
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} default-originate ipv4\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv6_vrf':
# convert to REST as and when used
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} default-originate ipv6\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv4_vrf':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list':['IPV4_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} advertise ipv4 {}\n'.format(config_cmd,advertise_ipv4)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv6_vrf':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV6_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} advertise ipv6 {}\n'.format(config_cmd,advertise_ipv6)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'vrf_rd_rt':
if cli_type in ["rest-put", "rest-patch"]:
if 'l3_rd' in kwargs:
url = rest_urls['bgp_route_distinguisher'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:route-distinguisher': kwargs['l3_rd']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN route-distinguisher config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN route-distinguisher delete Failed')
return False
if 'l3_both_rt' in kwargs:
url_i = rest_urls['bgp_import_rt'].format(vrf_name)
url_e = rest_urls['bgp_export_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['l3_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN import rt config Failed')
return False
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['l3_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN import rt delete Failed')
return False
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN export rt delete Failed')
return False
if 'l3_import_rt' in kwargs:
url = rest_urls['bgp_import_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['l3_import_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN import rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN import rt delete Failed')
return False
if 'l3_export_rt' in kwargs:
url = rest_urls['bgp_export_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['l3_export_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN export rt delete Failed')
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
if 'l3_rd' in kwargs:
my_cmd += '{} rd {}\n'.format(config_cmd,kwargs['l3_rd'])
if 'l3_both_rt' in kwargs:
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['l3_both_rt'])
if 'l3_import_rt' in kwargs:
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['l3_import_rt'])
if 'l3_export_rt' in kwargs:
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['l3_export_rt'])
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_all_vni':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_all_vni'].format(vrf_name)
payload = { 'openconfig-bgp-evpn-ext:advertise-all-vni': True}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-all-vni config Failed')
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_all_vni'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-all-vni delete Failed')
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise-all-vni\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_default_gw':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_default_gw'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-default-gw': True}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-default-gw config Failed')
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_default_gw'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-default-gw delete Failed')
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise-default-gw\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'autort':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} autort rfc8365-compatible\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv4':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} default-originate ipv4\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv6':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} default-originate ipv6\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'dup_addr_detection':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} dup-addr-detection {}\n'.format(config_cmd,dup_addr_detection)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'flooding_disable':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} flooding disable\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'flooding_head_end_replication':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} flooding head-end-replication\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'network' and config_cmd == '':
# convert to REST as and when used
if cli_type not in ['klish']:
my_cmd += 'network {} rd {} ethtag {} label {} esi {} gwip {} routermac {}\n'.format(network,rd,ethtag,bgp_label,esi_id,gw_ip,router_mac)
else:
st.error("Support not added to config - 'network'")
elif type1 == 'network' and config_cmd == 'no':
# convert to REST as and when used
if cli_type not in ['klish']:
my_cmd += '{} network {} rd {} ethtag {} label {} esi {} gwip {}\n'.format(config_cmd,network,rd,ethtag,bgp_label,esi_id,gw_ip)
else:
st.error("Support not added to config - 'network'")
elif type1 == 'route_target':
# convert to REST as and when used
if 'both_rt' in kwargs:
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['both_rt'])
if 'import_rt' in kwargs:
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['import_rt'])
if 'export_rt' in kwargs:
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['export_rt'])
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'vni':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url_vni = rest_urls['bgp_vni_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:vni': [{
'vni-number': int(kwargs['vni']) ,
'config':{
'vni-number': int(kwargs['vni']) ,
'advertise-default-gw': True
}
}]
}
if not config_rest(dut, http_method=cli_type, rest_url=url_vni, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni config Failed')
return False
if vni_unconfig == 'no':
url_vni = rest_urls['bgp_vni_unconfig'].format(vrf_name,kwargs['vni'])
if not delete_rest(dut, rest_url=url_vni):
st.banner('FAIL-OCYANG: BGP EVPN vni delete Failed')
return False
if 'vni_rd' in kwargs and vni_unconfig == '':
url = rest_urls['bgp_vni_route_distinguisher'].format(vrf_name,kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:route-distinguisher': kwargs['vni_rd']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni route-distinguisher config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN vni route-distinguisher delete Failed')
return False
if 'vni_both_rt' in kwargs and vni_unconfig == '':
url_i = rest_urls['bgp_vni_import_rt'].format(vrf_name,kwargs['vni'])
url_e = rest_urls['bgp_vni_export_rt'].format(vrf_name,kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['vni_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt config Failed')
return False
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['vni_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt delete Failed')
return False
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt delete Failed')
return False
if 'vni_import_rt' in kwargs and vni_unconfig == '':
url_i = rest_urls['bgp_vni_import_rt'].format(vrf_name, kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['vni_import_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt delete Failed')
return False
if 'vni_export_rt' in kwargs and vni_unconfig == '':
url_e = rest_urls['bgp_vni_export_rt'].format(vrf_name, kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['vni_export_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt delete Failed')
return False
else:
my_cmd += '{} vni {}\n'.format(vni_unconfig,kwargs['vni'])
if 'vni_rd' in kwargs and vni_unconfig == '':
my_cmd += '{} rd {}\n'.format(config_cmd,kwargs['vni_rd'])
if 'vni_both_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['vni_both_rt'])
if 'vni_import_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['vni_import_rt'])
if 'vni_export_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['vni_export_rt'])
if vni_unconfig != 'no':
my_cmd += 'exit\n'
if cli_type == 'klish':
my_cmd += "exit\n"
else:
st.error("config_type_list is not matching - {}".format(type1))
return False
if cli_type in ['klish'] and cur_type != 'vrf_vni':
#my_cmd += 'exit\n'
my_cmd += 'exit\n'
if cli_type not in ['rest-put', 'rest-patch']:
st.debug('\n'+my_cmd+'\n')
st.debug(my_cmd.split("\n"))
st.config(dut, my_cmd.split("\n") if cli_type == 'klish' else my_cmd, type=cli_type)
def parse_rest_output_l2vpn_evpn_vni(response):
    vni_dict = {}
    vni_data = response['output'].get('openconfig-bgp-evpn-ext:vni', [])
    if vni_data:
        vni_item = vni_data[0]
        vni_dict['vni'] = str(vni_item.get('state', {}).get('vni-number', 0))
        vni_dict['type'] = vni_item.get('state', {}).get('type', '')
        vni_dict['vrfname'] = ''
        vni_dict['rd'] = vni_item.get('state', {}).get('route-distinguisher', '')
        vni_dict['originip'] = vni_item.get('state', {}).get('originator', '')
        vni_dict['gwmac'] = vni_item.get('state', {}).get('advertise-gw-mac', False)
        vni_dict['rt'] = vni_item.get('state', {}).get('import-rts', [])
        vni_dict['rt'] = vni_dict['rt'] + vni_item.get('state', {}).get('export-rts', [])
        return [vni_dict]
    else:
        return []
def verify_bgp_l2vpn_evpn_vni_id(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_vni_id(dut=dut1,vni="100",rd="11:11",type="L2",vrfname="default",originip="1.1.1.1",gwmac="No",rt=['20:20','20:20'])
To verify bgp l2vpn evpn vni <vni-id>
:param dut:
:param vni:
:param type:
:param vrfname:
:param rd:
:param rt:
:param gwmac:
:param originip:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "vtysh" if cli_type == 'click' else cli_type
#cli_type = 'klish'
if 'vni' not in kwargs:
st.error("Mandatory arg vni is not present")
return False
if cli_type in ['rest-put', 'rest-patch']:
st.log('KLISH output for debugging REST')
st.show(dut, "show bgp l2vpn evpn vni {}".format(kwargs['vni']), type='klish')
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['bgp_vni_unconfig'].format('default',kwargs['vni'])
response = get_rest(dut, rest_url=url)
if response['output']:
result = parse_rest_output_l2vpn_evpn_vni(response)
else:
st.error("OCYANG-FAIL: verify bgp l2vpn evpn vni <id> - Get Response is empty")
return False
else:
result = st.show(dut, "show bgp l2vpn evpn vni {}".format(kwargs['vni']), type=cli_type)
if len(result) == 0:
st.error("Output is Empty")
return False
ret_val = False
for rlist in result:
count = 0
for key in kwargs:
if rlist[key] == kwargs[key]:
count = count + 1
if len(kwargs) == count:
ret_val = True
for key in kwargs:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
break
else:
for key in kwargs:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
if ret_val is False:
st.log("Fail: Not Matched all args in passed dict {} from parsed dict".format(kwargs))
return ret_val
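# Illustrative usage (not part of the library API): a minimal sketch of how the kwargs-driven
# matching in verify_bgp_l2vpn_evpn_vni_id is typically invoked from a test case. The dut
# handle and the vni/rd/rt/gwmac values are placeholders, not taken from any real topology.
def _example_check_l2vni_state(dut):
    # Expect VNI 100 to be an L2 VNI with RD 11:11 and import/export RTs of 20:20
    return verify_bgp_l2vpn_evpn_vni_id(dut=dut, vni="100", type="L2", rd="11:11",
                                        rt=['20:20', '20:20'], gwmac="No")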
def verify_bgp_l2vpn_evpn_summary(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_summary(dut=dut1,identifier="1.1.1.1",local_as="10",vrf_id="0",neighbor="172.16.31.10",version="4",pfxrcd="1",inq="0",outq="0",tblver="0",msgrcvd="3552")
verify_bgp_l2vpn_evpn_summary(dut=dut1,neighbor=["172.16.31.10","2001::2"],version=["4","4"],pfxrcd=["1","1"],inq=["0","0"],outq=["0","0"],tblver=["0","0"],as_no=["20","20"])
To verify bgp l2vpn evpn summary
:param dut:
:param identifier:
:param local_as:
:param vrf_id:
:param neighbor:
:param version:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
if cli_type == 'click':
cli_type = "vtysh"
if cli_type in ["rest-put", "rest-patch"]:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["get_evpn_neigh"]
rest_out = get_rest(dut, rest_url=url, timeout=30)
if rest_out["status"] == 200:
no_match=match=False
for key, val in kwargs.items():
if type(val) is not list:
kwargs[key] = [val]
rest_out = rest_out["output"]["openconfig-network-instance:neighbors"]["neighbor"]
for elem in rest_out:
neigh_list = elem['afi-safis']['afi-safi']
for neigh in neigh_list:
if neigh["state"]['afi-safi-name'] == "openconfig-bgp-types:L2VPN_EVPN":
evpn_neigh = elem['neighbor-address']
if 'neighbor' in kwargs:
try:
index_num = kwargs["neighbor"].index(evpn_neigh)
exp_status=kwargs["updown"][index_num]
if neigh['state']['prefixes']['received'] >= 0:
status="up"
else:
status="down"
if exp_status==status:
st.log("Match found for neighbor {} with status as {}".format(evpn_neigh,status))
match=True
else:
st.log("Match NOT found for neighbor {}; expected status: {}"
" but found: {}".format(evpn_neigh,exp_status,status))
no_match=True
except Exception:
continue
else:
st.log("specify the neighbor argument to be verified ")
return False
if no_match:
                st.log("At least one of the neighbor statuses is wrong;"
                       " kindly check the logs above")
return False
if match:
return True
else:
st.log("Neighbors {} not present in show output".format(kwargs["neighbor"]))
return False
else:
output = st.show(dut,"show bgp l2vpn evpn summary",type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
for i in range (len(output)):
pfx = output[i]['pfxrcd']
if pfx.isdigit():
if int(output[i]['pfxrcd']) > 0 or int(output[i]['pfxrcd']) == 0:
output[i]['updown'] = 'up'
else:
output[i]['updown'] = 'down'
else:
output[i]['updown'] = 'down'
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['identifier','local_as','vrf_id','rib_entries','no_peers']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
for i in range(len(kwargs[kwargs.keys()[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
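# Illustrative usage: checking that two EVPN peers are established via the derived 'updown'
# field, which verify_bgp_l2vpn_evpn_summary synthesizes from the pfxrcd column (CLI) or from
# the received-prefix counters (REST). The neighbor addresses below are placeholders.
def _example_check_evpn_sessions(dut):
    return verify_bgp_l2vpn_evpn_summary(dut=dut,
                                         neighbor=["10.1.1.2", "10.1.1.6"],
                                         updown=["up", "up"])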
def parse_rest_output_l2vpn_evpn_route(route):
dict = {}
dict['evpn_prefix'] = route.get('prefix','')
rd_str = route.get('route-distinguisher','')
regexp_match = re.search(r"\d+\:\d+|\d+\.\d+\.\d+\.\d+", rd_str)
dict['rd'] = regexp_match.group() if regexp_match else ''
dict['status_code'] = '*' if route.get("state",{}).get('valid-route',False) else ''
if route.get("state",{}).get('openconfig-rib-bgp-ext:best-path',False):
dict['status_code'] += '>'
dict['next_hop'] = route.get("attr-sets",{}).get("next-hop",'')
route_as_list = route.get("attr-sets",{}).get('as-path',{}).get('as-segment',[])
as_list = route_as_list[0].get('state',[]).get('member',[]) if route_as_list else []
as_path = ''
for as_num in as_list:
as_path = as_path + str(as_num) + " "
as_path = as_path.strip()
dict["path"] = as_path
return dict
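# Illustrative sketch: the route element below is a hand-crafted sample (an assumption about
# the REST payload shape, mirroring only the keys read by parse_rest_output_l2vpn_evpn_route)
# showing how one EVPN route is flattened into the same fields as the CLI template output.
def _example_parse_one_evpn_route():
    sample_route = {
        'prefix': '[5]:[0]:[24]:[100.1.1.0]',
        'route-distinguisher': '13:2',
        'state': {'valid-route': True, 'openconfig-rib-bgp-ext:best-path': True},
        'attr-sets': {'next-hop': '2.2.2.2',
                      'as-path': {'as-segment': [{'state': {'member': [20, 30]}}]}}
    }
    # Expected: {'evpn_prefix': '[5]:[0]:[24]:[100.1.1.0]', 'rd': '13:2',
    #            'status_code': '*>', 'next_hop': '2.2.2.2', 'path': '20 30'}
    return parse_rest_output_l2vpn_evpn_route(sample_route)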
def verify_bgp_l2vpn_evpn_route(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_route(dut=dut1,evpn_prefix="[5]:[0]:[24]:[192.168.127.12]",rd="13:2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",path="",origin_code="i",displayed_prefixes="5",no_of_paths="5",bgp_version="1",router_id="1.1.1.1")
verify_bgp_l2vpn_evpn_route(dut=dut1,evpn_prefix="[3]:[0]:[32]:[172.16.58.3]",rd="1.1.1.1:2",status_code="*>",metric="3276",next_hop="172.16.58.3",weight="8",path="",origin_code="i",displayed_prefixes="5",no_of_paths="5",bgp_version="1",router_id="1.1.1.1")
verify_bgp_l2vpn_evpn_route(dut=dut1,evpn_prefix="[2]:[0]:[48]:[00:21:ee:00:10:17]:[32]:[192.168.127.12]",rd="1.1.1.1:2",status_code="*>",metric="",next_hop="172.16.58.3",weight="32768",path="",origin_code="i")
To verify bgp l2vpn evpn route
:param dut:
    :param bgp_version:
:param router_id:
:param evpn_prefix:
:param rd:
:param path:
:param status_code:
:param weight:
:param metric:
:param next_hop:
:param origin_code:
:param displayed_prefixes:
:param no_of_paths:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "vtysh" if cli_type == 'click' else cli_type
#cli_type = 'klish'
if cli_type in ['rest-put', 'rest-patch']:
st.log('KLISH output for debugging REST')
st.show(dut, "show bgp l2vpn evpn route", type='klish')
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['get_evpn_routes']
response = get_rest(dut, rest_url=url)
if response['output']:
route_list = response["output"].get("openconfig-bgp-evpn-ext:routes",{}).get("route",[])
output = []
for route in route_list:
out_dict = {}
try:
if 'evpn_prefix' in kwargs:
if kwargs['evpn_prefix'] == route['prefix']:
out_dict = parse_rest_output_l2vpn_evpn_route(route)
elif 'rd' in kwargs:
rd_str = route.get('route-distinguisher', '')
regexp_match = re.search(r"\d+\:\d+|\d+\.\d+\.\d+\.\d+", rd_str)
current_rd = regexp_match.group() if regexp_match else ''
if kwargs['rd'] == current_rd :
out_dict = parse_rest_output_l2vpn_evpn_route(route)
output.append(out_dict)
except Exception as e:
st.log("{}".format(e))
continue
else:
st.error("OCYANG-FAIL: show bgp l2vpn evpn route - Get Response is empty")
return False
skip_key_list = ['bgp_version', 'router_id', 'metric', 'weight', 'origin_code', 'rt', 'et', 'rmac',
'displayed_prefixes', 'no_of_paths']
for skip_key in skip_key_list:
if skip_key in kwargs:
del kwargs[skip_key]
else:
output = st.show(dut, "show bgp l2vpn evpn route", type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
for a in output:
for key in a:
output[output.index(a)][key]=output[output.index(a)][key].lstrip()
output[output.index(a)][key]=output[output.index(a)][key].rstrip()
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['bgp_version','router_id','displayed_prefixes','no_of_paths']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def verify_bgp_l2vpn_evpn_vni(dut,**kwargs):
### NOT USED
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_vni(dut=dut1,vni="100",rd="11:11",type="L2",tenant_vrf="default",import_rt='20:20',export_rt='20:20',gw_macip="Enabled")
verify_bgp_l2vpn_evpn_vni(dut=dut1,vni="200",rd="15:15",type="L2",tenant_vrf="default",import_rt='5:5',export_rt='6:6',gw_macip="Enabled")
verify_bgp_l2vpn_evpn_vni(dut=dut1,vni=["100","200"],rd=["11:11","15:15"],type=["L2","L2"],tenant_vrf=["default","default"],import_rt=['20:20','5:5'],export_rt=['20:20','6:6'])
To verify bgp l2vpn evpn vni
:param dut:
:param vni:
:param type:
:param tenant_vrf:
:param rd:
:param bum_flooding:
:param all_vni_flag:
:param no_l2vni:
:param no_l3vni:
:param gw_macip:
:param import_rt:
:param export_rt:
:return:
"""
output = st.show(dut,"show bgp l2vpn evpn vni",type="vtysh")
if len(output) == 0:
st.error("Output is Empty")
return False
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['gw_macip','all_vni_flag','bum_flooding','no_l2vni','no_l3vni']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def verify_bgp_l2vpn_evpn_rd(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_rd(dut=dut1,evpn_type_5_prefix="[5]:[0]:[24]:[192.168.127.12]",rd="13:1",rd_name="as2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",origin_code="i",displayed_prefixes="1")
To verify bgp l2vpn evpn rd <rd-value>
:param dut:
:param evpn_type_2_prefix:
:param evpn_type_3_prefix:
:param evpn_type_4_prefix:
:param evpn_type_5_prefix:
:param rd:
:param rd_name:
:param status_code:
:param metric:
:param next_hop:
:param origin_code:
:param displayed_prefixes:
:param total_prefixes:
:return:
"""
if 'rd' not in kwargs:
        st.error("Mandatory arg rd is not present")
return False
output = st.show(dut,"show bgp l2vpn evpn rd {}".format(kwargs['rd']),type="vtysh")
if len(output) == 0:
st.error("Output is Empty")
return False
for a in output:
for key in a:
output[output.index(a)][key]=output[output.index(a)][key].lstrip()
output[output.index(a)][key]=output[output.index(a)][key].rstrip()
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['rd_name','rd','displayed_prefixes','total_prefixes']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def verify_bgp_l2vpn_evpn_route_type_prefix(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_route_type_prefix(dut=dut1,evpn_type_5_prefix="[5]:[0]:[24]:[192.168.127.12]",rd="13:1",rd_name="as2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",origin_code="i",displayed_prefixes="1")
evpn.verify_bgp_l2vpn_evpn_route_type_prefix(dut=data.dut1,evpn_type_5_prefix="[5]:[0]:[24]:[192.168.127.12]",rd="13:2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="0",path="20",origin_code="i",displayed_prefixes="4",no_of_paths="6")
To verify bgp l2vpn evpn route type prefix
:param dut:
:param evpn_type_5_prefix:
:param rd:
:param path:
:param status_code:
:param weight:
:param metric:
:param next_hop:
:param origin_code:
:param displayed_prefixes:
:param no_of_paths:
:return:
"""
output = st.show(dut,"show bgp l2vpn evpn route type prefix",type="vtysh")
if len(output) == 0:
st.error("Output is Empty")
return False
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['bgp_version','router_id','displayed_prefixes','no_of_paths']
for a in output:
for key in a:
output[output.index(a)][key]=output[output.index(a)][key].lstrip()
output[output.index(a)][key]=output[output.index(a)][key].rstrip()
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def create_overlay_intf(dut, vtep_name, ip_addr, config='yes', skip_error=False, cli_type=''):
"""
purpose:
This definition is used to create overlay interface
Arguments:
:param dut: device to be configured
:type dut: string
:param vtep_name: VTEP name to be created
:type vtep_name: string
:param ip_addr: ip address to be bound to overlay gateway
:type ip_addr: string
:param config: it takes value as 'yes' or 'no' to configure or remove overlay respectively
:type config: string
    :param cli_type: UI type to be used (click/klish/rest-put/rest-patch)
:return: None
usage:
create_overlay_intf(dut1, "dut1VTEP", "1.1.1.1", cli_type='click')
create_overlay_intf(dut1, "dut1VTEP", "1.1.1.1", config='no', cli_type='klish')
    Created by: Julius <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
if config == 'yes':
conf_str = ''
action = 'add'
else:
conf_str = 'no'
ip_addr = ''
action = 'del'
if cli_type == 'click':
command = "config vxlan {} {} {}".format(action, vtep_name, ip_addr)
elif cli_type == 'klish':
command = []
command.append('interface vxlan {}'.format(vtep_name))
command.append('{} source-ip {}'.format(conf_str, ip_addr))
command.append('exit')
elif cli_type in ["rest-put", "rest-patch"]:
rest_urls = st.get_datastore(dut, "rest_urls")
if config == 'yes':
url = rest_urls['config_vxlan_with_ip']
payload = { "openconfig-interfaces:interface":
[ { "name": vtep_name,
"config": { "name": vtep_name, "type": "IF_NVE" },
"openconfig-vxlan:vxlan-if": { "config": { "source-vtep-ip": ip_addr } }
} ]
}
            ### PUT and PATCH don't work for this URI, hence use POST
            ### The PUT and PATCH URIs apply config similar to the klish CLIs
if not config_rest(dut, http_method='post', rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: Create Vxlan Interface with src vtep IP failed')
return False
else:
url = rest_urls['delete_vxlan_ip'].format(vtep_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG')
url = rest_urls['delete_vxlan'].format(vtep_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG')
return
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
st.debug(command)
return st.config(dut, command, type=cli_type, skip_error_check=skip_error)
def create_evpn_instance(dut, nvo_name, vtep_name, config='yes', skip_error=False, cli_type=''):
"""
purpose:
This definition is used to create EVPN instance
Arguments:
:param dut: device to be configured
:type dut: string
:param nvo_name: evpn instance name to be created
:type nvo_name: string
:param vtep_name: vtep name to be bound to evpn instance
:type vtep_name: string
:param config: it takes value as 'yes' or 'no' to configure or remove evpn instance respectively
:type config: string
    :param cli_type: UI type to be used (click/klish/rest-put/rest-patch)
    :param skip_error: whether to skip error checking while applying the config
:return: None
usage:
create_evpn_instance(dut1, "dut1EVPN", "dut1VTEP", cli_type='click')
create_evpn_instance(dut1, "dut1EVPN", "dut1VTEP", config='no', cli_type='klish')
    Created by: Julius <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
if config == 'yes':
action = 'add'
else:
vtep_name = ''
action = 'del'
if cli_type == 'click':
command = "config vxlan evpn_nvo {} {} {}".format(action, nvo_name, vtep_name)
st.debug(command)
return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
elif cli_type == 'klish':
st.error("NVO command is not supported in klish")
return False
elif cli_type in ['rest-put','rest-patch']:
st.error("NVO config through OCYANG URI not supported")
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
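# Illustrative usage: the usual click-mode bring-up order is the VTEP first, then the NVO
# instance bound to it (klish and REST do not use a separate NVO object, as noted above).
# The VTEP/NVO names and the loopback IP are placeholders for illustration only.
def _example_setup_vtep_click(dut):
    create_overlay_intf(dut, "vtep1", "1.1.1.1", cli_type='click')
    return create_evpn_instance(dut, "nvo1", "vtep1", cli_type='click')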
def map_vlan_vni(dut, vtep_name, vlan_id, vni_id, range_val='1', config='yes', skip_error=False, cli_type=''):
"""
purpose:
This definition is used to create VLAN to VNI mapping under EVPN instance
Arguments:
:param dut: device to be configured
:type dut: string
:param vtep_name: VTEP name where VLAN to VNI mapping needs to be done
:type vtep_name: string
:param vlan_id: vlan id to be mapped to VNI
:type vlan_id: string
:param vni_id: VNI id where vlan to be mapped
:type vni_id: string
:param range_val: range of vlans to be mapped to VNI
:type range_val: string
:param config: it takes value as 'yes' or 'no' to configure or remove evpn instance respectively
:type config: string
    :param cli_type: UI type to be used (click/klish/rest-put/rest-patch)
    :param skip_error: whether to skip error checking while applying the config
:return: None
usage:
map_vlan_vni(dut1, "dut1VTEP", "100", "100", cli_type='click')
map_vlan_vni(dut1, "dut1VTEP", "100", "100", config="no", cli_type='click')
map_vlan_vni(dut1, "dut1VTEP", "100", "100", range="10")
map_vlan_vni(dut1, "dut1VTEP", "100", "100", range="10", config="no")
    Created by: Julius <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
range_val = int(range_val)
if config == 'yes':
conf_str = ''
action = 'add'
else:
conf_str = 'no'
action = 'del'
if cli_type == 'click':
if range_val > 1:
vlan_end = int(vlan_id) + range_val - 1
command = "config vxlan map_range {} {} {} {} {}".format(action, vtep_name, vlan_id, vlan_end, vni_id)
elif range_val == 1:
command = "config vxlan map {} {} {} {}".format(action, vtep_name, vlan_id, vni_id)
elif cli_type == 'klish':
command = []
command.append('interface vxlan {}'.format(vtep_name))
if range_val == 1:
command.append('{} map vni {} vlan {}'.format(conf_str, vni_id, vlan_id))
elif range_val > 1:
command.append('{} map vni {} vlan {} count {}'.format(conf_str, vni_id, vlan_id, range_val))
command.append('exit')
elif cli_type in ['rest-put','rest-patch']:
if range_val == 1:
rest_urls = st.get_datastore(dut, "rest_urls")
vlan_data = str(vlan_id) if type(vlan_id) is not str else vlan_id
vlan_str = 'Vlan'+vlan_data
vni_id = int(vni_id) if type(vni_id) is not int else vni_id
if config == 'yes':
url = rest_urls['config_vlan_vni_mapping'].format(vlan_str)
payload = { "openconfig-vxlan:vni-instance":
[{"vni-id": vni_id,
"source-nve": vtep_name,
"config": {"vni-id": vni_id, "source-nve": vtep_name}
}]
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload,get_response=True)
error_list = response['output'].get('ietf-restconf:errors', {}).get('error', [])
if error_list:
err_msg = error_list[0].get('error-message', '')
st.banner('FAIL-OCYANG: vlan-vni map failed')
return err_msg
else:
url = rest_urls['delete_vlan_vni_mapping'].format(vlan_str,vni_id,vtep_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG')
return False
return
elif range_val > 1:
            ### For a range, the above URI would have to be called once per VLAN; fall back to klish instead
cli_type = 'klish'
command = []
command.append('interface vxlan {}'.format(vtep_name))
command.append('{} map vni {} vlan {} count {}'.format(conf_str, vni_id, vlan_id, range_val))
command.append('exit')
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
st.debug(command)
return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
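# Illustrative usage: mapping a block of 10 VLANs starting at VLAN 100 to VNIs starting at
# 100 on an existing VTEP. For REST UIs a range falls back to klish internally (see above),
# so the call shape is the same for all UI types. Names and numbers are placeholders.
def _example_map_vlan_block(dut):
    return map_vlan_vni(dut, "vtep1", "100", "100", range_val="10", cli_type='klish')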
def parse_rest_output_vxlan_tunnel(response):
tunnel_list = response['output'].get('openconfig-vxlan:vxlan-tunnel-infos',{}).get('vxlan-tunnel-info',[])
tunnel_count = len(tunnel_list)
result = []
for tunnel in tunnel_list:
dict = {}
dict['total_count'] = tunnel_count
dict['src_vtep'] = tunnel.get('state',{}).get('source-ip',"")
dict['rem_vtep'] = tunnel.get('state',{}).get('peer-ip',"")
tunnel_status = tunnel.get('state',{}).get('status',"")
if tunnel_status == 'UP':
dict['tun_status'] = 'oper_up'
elif tunnel_status == 'DOWN':
dict['tun_status'] = 'oper_down'
else:
## To handle later for any other type
dict['tun_status'] = tunnel['state']['status']
result.append(dict)
return result
def verify_vxlan_tunnel_status(dut, src_vtep, rem_vtep_list, exp_status_list, cli_type=''):
'''
purpose:
This definition is used to verify operational status of VxLAN tunnel
Arguments:
:param dut: Device name where the command to be executed
:type dut: string
:param src_vtep: ip address of local VTEP
:type src_vtep: string
:param rem_vtep_list: list of remote VTEP ip address
:type rem_vtep_list: string
:param exp_status_list: list of expected operational status of VTEP's; example ['oper_down','oper_up']
:type exp_status_list: list
:return: True/False True - success case; False - Failure case
usage: verify_vxlan_tunnel_status(dut1,'1.1.1.1',['2.2.2.2','3.3.3.3'],['oper_up','oper_up'])
verify_vxlan_tunnel_status(dut1,'1.1.1.1',['2.2.2.2','3.3.3.3'],['oper_down','oper_up'])
    Created by: Julius <<EMAIL>>
'''
cli_type = st.get_ui_type(dut,cli_type=cli_type)
success = True
if cli_type in ['click','klish']:
cli_out = st.show(dut, 'show vxlan tunnel', type=cli_type)
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['vxlan_tunnel_info']
response = get_rest(dut, rest_url=url)
if response['output']:
cli_out = parse_rest_output_vxlan_tunnel(response)
else:
st.error("OCYANG-FAIL: verify vxlan tunnel - Get Response is empty")
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
for rem_vtep,status in zip(rem_vtep_list,exp_status_list):
fil_out = filter_and_select(cli_out, ["tun_status"], {"src_vtep" : src_vtep,
"rem_vtep" : rem_vtep})
if not fil_out:
st.error('No entry found for source VTEP: {} and remote VTEP: {} in '
'output: {}'.format(src_vtep,rem_vtep,cli_out))
success = False
continue
else:
fil_out = fil_out[0]
if fil_out["tun_status"] == status:
st.log('Match found; remote VTEP {} status {}; expected '
'{}'.format(rem_vtep,fil_out["tun_status"],status))
else:
st.error('Match NOT found; expected status for remote VTEP: {} is : {} '
'but found: {}'.format(rem_vtep,status,fil_out["tun_status"]))
success = False
    return success
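# Illustrative usage: verifying tunnel state from a local VTEP towards two remote VTEPs; the
# expected states are paired positionally with the remote VTEP list. All addresses below are
# placeholders, not taken from any specific setup.
def _example_check_tunnels(dut):
    return verify_vxlan_tunnel_status(dut, '1.1.1.1',
                                      ['2.2.2.2', '3.3.3.3'],
                                      ['oper_up', 'oper_down'])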
def verify_bgp_l2vpn_evpn_route_type_macip(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_route_type_macip(dut=data.dut1,evpn_type_2_prefix="[2]:[0]:[48]:[00:21:ee:00:10:16]",rd="1.1.1.1:2",status_code="*>",metric="",next_hop="172.16.58.3",weight="32768",path="",origin_code="i")
verify_bgp_l2vpn_evpn_route_type_macip(dut=data.dut1,evpn_type_2_prefix="[2]:[0]:[48]:[00:21:ee:00:10:16]:[32]:[5172.16.31.10]",rd="1.1.1.1:2",status_code="*>",metric="",next_hop="172.16.58.3",weight="32768",path="",origin_code="i")
To verify bgp l2vpn evpn route type macip
:param dut:
:param evpn_type_2_prefix:
:param rd:
:param path:
:param status_code:
:param weight:
:param metric:
:param next_hop:
:param origin_code:
:param displayed_prefixes:
:param no_of_paths:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "vtysh" if cli_type == 'click' else cli_type
if cli_type in ["rest-put", "rest-patch"]:
ret_val=True
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["get_evpn_routes"]
rest_out = get_rest(dut, rest_url=url, timeout=30)
if rest_out["status"] == 200:
out_dict = {}
rest_out = rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
match = False
for i in rest_out:
try:
prefix = i["prefix"]
if prefix == kwargs["evpn_type_2_prefix"] and i["state"]['valid-route']:
out_dict["next_hop"]=i["attr-sets"]["next-hop"]
if 'rd' in kwargs:
out_dict["rd"]=i["route-distinguisher"]
if 'origin_code' in kwargs and i['attr-sets']['origin'] == "IGP":
out_dict["origin_code"]= "i"
if 'origin_code' in kwargs and i['attr-sets']['origin'] == "EGP":
out_dict["origin_code"]= "e"
if 'origin_code' in kwargs and i['attr-sets']['origin'] == "incomplete":
out_dict["origin_code"]= "?"
if 'path' in kwargs:
as_path = ""
for as1 in i['attr-sets']['as-path']['as-segment'][0]['state']['member']:
as_path = as_path + str(as1) + " "
as_path = as_path.strip()
out_dict["path"]= as_path
for key in out_dict.keys():
if key in kwargs:
if out_dict[key] == kwargs[key]:
st.log("Expected value {} found for key: {} for route {}".format(out_dict[key], key,prefix))
match = True
else:
st.log("Match NOT found; expected value {} but got"
" {}".format(kwargs[key], out_dict[key]))
ret_val = False
if match:
break
except Exception:
continue
if not match:
st.log("MAC IP Route {} was not found in the rest output".format(kwargs["evpn_type_2_prefix"]))
return False
elif not ret_val:
return False
else:
return True
else:
st.log("REST command execution failed")
ret_val = False
else:
output = st.show(dut,"show bgp l2vpn evpn route type macip",type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['bgp_version','router_id','displayed_prefixes','no_of_paths']
for a in output:
for key in a:
output[output.index(a)][key]=output[output.index(a)][key].lstrip()
output[output.index(a)][key]=output[output.index(a)][key].rstrip()
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
for i in range(len(kwargs[kwargs.keys()[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
st.log("output is {}".format(output))
st.log("input_dict is {}".format(input_dict))
ret_val = False
return ret_val
def map_vrf_vni(dut, vrf_name, vni, config='yes', vtep_name='', skip_error=False, cli_type=''):
"""
purpose:
This API is used to configure VRF to VNI mapping
Arguments:
:param dut: device to be configured
:type dut: string
:param vrf_name: name of the vrf to be mapped to VNI
:type vrf_name: string
:param vni: VNI to be mapped to the VRF
:type vni: string
:param config: it takes value as 'yes' or 'no' to configure or remove the mapping
:type config: string
    :param cli_type: UI type to be used (click/klish/rest-put/rest-patch)
    :param vtep_name: VTEP name, mandatory for klish and REST
    :param skip_error: whether to skip error checking while applying the config
:return: None
usage:
map_vrf_vni(dut1, "Vrf-1", "100", cli_type='click')
map_vrf_vni(dut1, "Vrf-1", "100", config='no', cli_type='click')
Created by: <NAME> <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
if config == 'yes':
conf_str = ''
action = 'add_vrf_vni_map'
else:
conf_str = 'no'
if cli_type == 'click':
vni = ''
action = 'del_vrf_vni_map'
if cli_type == 'click':
command = "config vrf {} {} {}".format(action, vrf_name, vni)
elif cli_type == 'klish':
if not vtep_name:
st.error('Mandatory argument vtep_name MISSING')
return False
command = []
command.append('interface vxlan {}'.format(vtep_name))
command.append('{} map vni {} vrf {}'.format(conf_str, vni, vrf_name))
command.append('exit')
elif cli_type in ['rest-put','rest-patch']:
if not vtep_name:
st.error('Mandatory argument vtep_name MISSING')
return False
rest_urls = st.get_datastore(dut, "rest_urls")
vni = int(vni) if type(vni) is not int else vni
if config == 'yes':
url = rest_urls['config_vlan_vni_mapping'].format(vrf_name)
payload = { "openconfig-vxlan:vni-instance":
[{"vni-id": vni,
"source-nve": vtep_name,
"config": {"vni-id": vni, "source-nve": vtep_name}
}]
}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG')
return False
else:
url = rest_urls['delete_vlan_vni_mapping'].format(vrf_name,vni,vtep_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG')
return False
return
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
st.debug(command)
return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
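# Illustrative usage: binding an L3 VNI to a tenant VRF. vtep_name is mandatory for klish and
# REST (see the check above) even though click ignores it. The names and VNI are placeholders.
def _example_map_l3vni(dut):
    return map_vrf_vni(dut, "Vrf1", "500", vtep_name="vtep1", cli_type='klish')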
def parse_rest_output_remote_vni(response):
tunnel_vni_list = response['output'].get('openconfig-vxlan:vxlan-vni-peer-infos', {}).get('vxlan-vni-peer-info', [])
tunnel_vni_count = len(tunnel_vni_list)
result = []
for tunnel in tunnel_vni_list:
dict = {}
dict['total_count'] = str(tunnel_vni_count)
### vlan missing in ocyang output
dict['vlan'] = ''
dict['rvtep'] = tunnel.get('peer-ip',"")
dict['vni'] = tunnel.get('state',{}).get('vni-id',0)
result.append(dict)
return result
def verify_vxlan_evpn_remote_vni_id(dut, **kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_vxlan_evpn_remote_vni_id(dut=dut1,vni="100",vlan="Vlan100",rvtep="172.16.58.3",type="dynamic",identifier="all")
To verify show vxlan evpn_remote_vni <vni-id|all>
:param dut:
:param vni:
:param vlan:
:param rvtep:
:param type:
:param total_count:
:param identifier: all | specific vni id which we want to parse using show command
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if 'identifier' not in kwargs:
        st.error("Mandatory arg identifier is not present")
return False
if cli_type == 'klish':
if kwargs['identifier'] == 'all':
kwargs['identifier'] = ''
cmd = 'evpn_remote_vni' if cli_type == 'click' else 'remote vni'
command = 'show vxlan {}'.format(cmd)
if kwargs['identifier']:
command += " {}".format(kwargs['identifier'])
if cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['vxlan_vni_peer_info']
response = get_rest(dut, rest_url=url)
st.log('KLISH output for debugging REST')
st.show(dut, 'show vxlan remote vni', type='klish')
if response['output']:
output = parse_rest_output_remote_vni(response)
else:
st.error("OCYANG-FAIL: verify vxlan remote vni - Get Response is empty")
return False
if 'vlan' in kwargs:
del kwargs['vlan']
else:
output = st.show(dut, command, type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
del kwargs['identifier']
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total_count']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
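# Illustrative usage: checking that VNI 100 has been learnt from a remote VTEP. 'identifier'
# is the mandatory selector ('all' or a specific VNI); the other values are placeholders.
def _example_check_remote_vni(dut):
    return verify_vxlan_evpn_remote_vni_id(dut=dut, identifier="all", vni="100",
                                           vlan="Vlan100", rvtep="2.2.2.2", total_count="1")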
def verify_vxlan_evpn_remote_mac_id(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_vxlan_evpn_remote_mac_id(dut=dut1,vni="100",vlan="Vlan100",rvtep="172.16.58.3",type="dynamic",identifier="all",mac="00:21:ee:00:10:33")
To verify show vxlan evpn_remote_mac <mac|all>
:param dut:
:param vni:
:param vlan:
:param rvtep:
:param type:
:param mac:
:param total_count:
:param identifier: all | specific mac which we want to parse using show command
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    ### No OCYANG URI support for "show vxlan remote mac". Hence fall back to klish
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if 'identifier' not in kwargs:
st.error("Mandatory arg identifier is not present")
return False
if cli_type == 'klish':
if kwargs['identifier'] == 'all':
kwargs['identifier'] = ''
cmd = 'evpn_remote_mac' if cli_type == 'click' else 'remote mac'
command = 'show vxlan {}'.format(cmd)
if kwargs['identifier']:
command += " {}".format(kwargs['identifier'])
output = st.show(dut, command, type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
del kwargs['identifier']
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total_count','min_total_count']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if key != 'min_total_count':
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
elif key == 'min_total_count':
if rlist['total_count'] >= kwargs[key] and key in common_key_list:
count = count + 1
st.log("Match: Match key {} found => {} out of {}".format(key,kwargs[key],rlist['total_count']))
if 'min_total_count' in kwargs:
del kwargs['min_total_count']
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def parse_rest_output_vlanvni_map(dut,vlan_data,url):
response = get_rest(dut, rest_url=url)
if response['output']:
dict = {}
dict['vlan'] = vlan_data
vni_map = response['output'].get('openconfig-vxlan:vni-instance', [])
if vni_map:
vni_id = vni_map[0].get('state', {}).get('vni-id', 0)
else:
vni_id = 0
dict['vni'] = str(vni_id) if type(vni_id) is int else vni_id
return dict
else:
st.error("OCYANG-FAIL: verify vxlan vlanvnimap - Get Response is empty for vlan:{}".format(vlan_data))
return False
def verify_vxlan_vlanvnimap(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
    verify_vxlan_vlanvnimap(dut=dut1,vni=["100","101"],vlan=["Vlan100","Vlan101"],total_count="2")
To verify show vxlan vlanvnimap
:param dut:
:param vni:
:param vlan:
:param total_count:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
### There is no direct vlan-vni mapping output in ocyang.
#cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if "return_output" in kwargs:
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if cli_type in ['rest-put', 'rest-patch']:
st.log('KLISH output for debugging REST')
st.show(dut, 'show vxlan vlanvnimap', type='klish')
rest_urls = st.get_datastore(dut, "rest_urls")
result = []
vlan_list = [kwargs['vlan']] if type(kwargs['vlan']) is not list else kwargs['vlan']
for vlan_id in vlan_list:
vlan_data = str(vlan_id) if type(vlan_id) is not str else vlan_id
vlan_str = 'Vlan' + vlan_data if 'Vlan' not in vlan_data else vlan_data
url = rest_urls['config_vlan_vni_mapping'].format(vlan_str)
dict = parse_rest_output_vlanvni_map(dut,vlan_data,url)
if dict:
result.append(dict)
count = len(result)
for dict in result:
dict.update({'total_count': count})
output = result
st.log("parsed output:{}".format(result))
kwargs.pop('total_count')
else:
output = st.show(dut, "show vxlan vlanvnimap", type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total_count']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def parse_rest_output_vrfvni_map(dut, vrf_str, url):
response = get_rest(dut, rest_url=url)
if response['output']:
dict = {}
dict['vrf'] = vrf_str
vni_map = response['output'].get('openconfig-vxlan:vni-instance', [])
if vni_map:
vni_id = vni_map[0].get('state', {}).get('vni-id', 0)
else:
vni_id = 0
dict['vni'] = str(vni_id) if type(vni_id) is int else vni_id
return dict
else:
        st.error("OCYANG-FAIL: verify vxlan vrfvnimap - Get Response is empty for vrf:{}".format(vrf_str))
return False
def verify_vxlan_vrfvnimap(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
    verify_vxlan_vrfvnimap(dut=dut1,vni=["500","501"],vrf=["Vrf1","Vrf2"],total_count="2")
To verify show vxlan vrfvnimap
:param dut:
:param vni:
:param vlan:
:param total_count:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
### There is no direct vrf-vni mapping output in ocyang.
if "return_output" in kwargs:
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if cli_type in ['rest-put', 'rest-patch']:
st.log('KLISH output for debugging REST')
st.show(dut, 'show vxlan vrfvnimap', type='klish')
rest_urls = st.get_datastore(dut, "rest_urls")
result = []
vrf_list = [kwargs['vrf']] if type(kwargs['vrf']) is str else kwargs['vrf']
for vrf in vrf_list:
vrf_str = str(vrf) if type(vrf) is not str else vrf
url = rest_urls['config_vlan_vni_mapping'].format(vrf_str)
dict = parse_rest_output_vrfvni_map(dut, vrf_str, url)
if dict:
result.append(dict)
count = len(result)
for dict in result:
dict.update({'total_count': count})
output = result
st.log("parsed output:{}".format(result))
kwargs.pop('total_count')
else:
output = st.show(dut, "show vxlan vrfvnimap", type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total_count']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
    ret_val = True
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def verify_bgp_l2vpn_evpn_route_detail_type_prefix(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_route_detail_type_prefix(dut=evpn_dict["leaf_node_list"][3],
prefix="[5]:[0]:[24]:[172.16.31.10]",rd="9:9",rt="500:500",rvtep="5.5.5.2")
To verify show bgp l2vpn evpn route detail type prefix
:param dut:
:param rd:
:param as_path:
:param vni_id:
:param prefix:
:param rvtep:
:param bgp_peer:
:param origin:
:param rt:
:param et:
:param rmac:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
ret_val = True
if cli_type in ["rest-put", "rest-patch"]:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["get_evpn_routes"]
rest_out = get_rest(dut, rest_url=url, timeout=30)
if rest_out["status"] == 200:
out_dict={}
rest_out=rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
for i in rest_out:
try:
if "rmac" in kwargs:
rmac = i["attr-sets"]["ext-community"][2]
rmac=":".join(rmac.split(":")[1:7])
rt=i["attr-sets"]["ext-community"][0]
rt=":".join(rt.split(":")[1:3])
out_dict["rt"]=rt
prefix = i["prefix"]
nexthop = i["attr-sets"]["next-hop"]
if prefix == kwargs["prefix"] and nexthop == kwargs["rvtep"] and rmac == kwargs["rmac"]:
if i["state"]["openconfig-rib-bgp-ext:best-path"]:
vni=i["attr-sets"]["tag"]
out_dict["vni_id"] = vni
rd=i["route-distinguisher"]
rd=rd.split(":")[0]
out_dict["rd"]=rd
for key in out_dict.keys():
if key in kwargs:
if out_dict[key]==kwargs[key]:
st.log("Expected value {} found for key: {}".format(out_dict[key],key))
else:
st.log("Match NOT found; expected value {} but got"
" {}".format(kwargs[key],out_dict[key]))
ret_val = False
if ret_val:
return True
except Exception:
continue
else:
st.log("REST command execution failed")
ret_val=False
else:
cli_type = "vtysh" if cli_type == 'click' else "klish"
output = st.show(dut,"show bgp l2vpn evpn route detail type prefix",type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
ret_val = "True"
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
for i in range(len(kwargs[kwargs.keys()[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def clear_bgp_evpn(dut,clear_type,**kwargs):
'''
:param dut:
:type dut: string
:param clear_type:
:type clear_type: string
:param kwargs:
:type kwargs: dictionary
:return: None
Usage:
to clear all neighbors:
clear_bgp_evpn(dut1,"*")
clear_bgp_evpn(dut1,"*",dir="in")
clear_bgp_evpn(dut1,"*",dir="in",prefix="yes")
clear_bgp_evpn(dut1,"*",dir="out")
clear_bgp_evpn(dut1,"*",soft_dir="in")
clear_bgp_evpn(dut1,"*",soft_dir="out")
to clear specific neighbors:
clear_bgp_evpn(dut1,"1.1.1.1")
clear_bgp_evpn(dut1,"1.1.1.1",dir="in")
clear_bgp_evpn(dut1,"1.1.1.1",dir="in",prefix="yes")
clear_bgp_evpn(dut1,"1.1.1.1",dir="out")
clear_bgp_evpn(dut1,"1.1.1.1",soft_dir="in")
clear_bgp_evpn(dut1,"1.1.1.1",soft_dir="out")
'''
cli_type = kwargs.get('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "vtysh" if cli_type == 'click' else "klish"
cmd = "clear bgp l2vpn evpn {}".format(clear_type)
supported_args = ["dir","prefix","soft_dir","cli_type"]
for key in kwargs.keys():
if key not in supported_args:
st.error("kindly specify the supported argument among {}".format(supported_args))
return None
if "dir" in kwargs:
cmd += " {}".format(kwargs["dir"])
if kwargs["dir"] == "in" and "prefix" in kwargs:
cmd += " prefix-filter"
if "soft_dir" in kwargs:
cmd += " soft {}".format(kwargs["soft_dir"])
return st.config(dut,cmd,type=cli_type,skip_tmpl=True,conf=False)
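# Illustrative usage: a soft inbound clear of all EVPN peers, a targeted hard clear with the
# prefix-filter option, and a re-fetch of the summary output. The neighbor address is a
# placeholder for illustration only.
def _example_clear_and_refetch_evpn(dut):
    clear_bgp_evpn(dut, "*", soft_dir="in")
    clear_bgp_evpn(dut, "1.1.1.1", dir="in", prefix="yes")
    return fetch_evpn_neigh_output(dut)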
def fetch_evpn_neigh_output(dut,**kwargs):
'''
:param dut:
:type dut: string
:return: cli output in success case; False in failure case
Usage:
fetch_evpn_neigh_output(dut1)
'''
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "vtysh" if cli_type == 'click' else "klish"
output = st.show(dut,"show bgp l2vpn evpn summary",type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
else:
return output
def verify_bgp_l2vpn_evpn_route_type_multicast(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_bgp_l2vpn_evpn_route_type_multicast(dut=dut1,evpn_type_3_prefix="[3]:[0]:[32]:[172.16.58.3]",rd="1.1.1.1:2",status_code="*>",metric="3276",next_hop="172.16.58.3",weight="8",path="",origin_code="i",displayed_prefixes="5",no_of_paths="5",bgp_version="1",router_id="1.1.1.1")
To verify bgp l2vpn evpn route type multicast
:param dut:
    :param bgp_version:
:param router_id:
:param evpn_type_3_prefix:
:param rd:
:param path:
:param status_code:
:param weight:
:param metric:
:param next_hop:
:param origin_code:
:param displayed_prefixes:
:param no_of_paths:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
if cli_type in ["rest-put", "rest-patch"]:
ret_val=True
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["get_evpn_routes"]
rest_out = get_rest(dut, rest_url=url, timeout=30)
if rest_out["status"] == 200:
out_dict = {}
rest_out = rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
for i in rest_out:
try:
prefix = i["prefix"]
if prefix == kwargs["evpn_type_3_prefix"] and i["state"]["openconfig-rib-bgp-ext:best-path"]:
nexthop = i["attr-sets"]["next-hop"]
out_dict["next_hop"]=nexthop
for key in out_dict.keys():
if key in kwargs:
if out_dict[key] == kwargs[key]:
st.log("Expected value {} found for key: {}".format(out_dict[key], key))
else:
st.log("Match NOT found; expected value {} but got"
" {}".format(kwargs[key], out_dict[key]))
ret_val = False
if ret_val:
return True
except Exception:
continue
else:
st.log("REST command execution failed")
ret_val = False
else:
cli_type = "vtysh" if cli_type == 'click' else "klish"
output = st.show(dut,"show bgp l2vpn evpn route type multicast",type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
for a in output:
for key in a:
output[output.index(a)][key]=output[output.index(a)][key].lstrip()
output[output.index(a)][key]=output[output.index(a)][key].rstrip()
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['bgp_version','router_id','displayed_prefixes','no_of_paths']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
for i in range(len(kwargs[kwargs.keys()[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def verify_vxlan_tunnel_count(dut, exp_count, cli_type=''):
"""
:param dut:
:param exp_count:
:param cli_type:
:return:
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
if cli_type == "click":
command = 'show vxlan tunnel | grep "Total count "'
output = st.show(dut, command, skip_tmpl=True, type=cli_type)
x = re.search(r"\d+", output)
elif cli_type == "klish":
command = 'show vxlan tunnel | grep "EVPN"'
output = st.show(dut, command, skip_tmpl=True, type=cli_type)
x = output.count("EVPN_")
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['vxlan_tunnel_info']
response = get_rest(dut, rest_url=url)
st.log('KLISH output for debugging REST')
st.show(dut, 'show vxlan tunnel', type='klish')
tunnel_list = response['output']['openconfig-vxlan:vxlan-tunnel-infos']['vxlan-tunnel-info']
x = len(tunnel_list)
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
if x:
if cli_type == "click":
if int(x.group()) == exp_count:
return True
else:
st.log('FAIL: Expected tunnel count not found.')
return False
elif cli_type in ["klish", "rest-put", "rest-patch"]:
if x == exp_count:
return True
else:
st.log('FAIL: Expected tunnel count not found.')
return False
else:
return -1
def create_linktrack(dut, track_group_name, config='yes', **kwargs):
'''
purpose:
This definition is used to create link track
Arguments:
:param dut: device to be configured
:type dut: string
:param track_group_name: interface track group name name to be created
:param config: it takes value as 'yes' or 'no' to configure or remove interface link tracking
:type config: string
:return: None
usage:
create_linktrack(dut1, "group1")
create_linktrack(dut1, "group1",config='no')
Created by: Gangadhara <<EMAIL>>
'''
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if cli_type == 'click':
if config=='yes':
command = "config linktrack add {}".format(track_group_name)
else:
command = "config linktrack del {}".format(track_group_name)
return st.config(dut=dut,cmd=command)
elif cli_type == 'klish':
config = 'no ' if config != 'yes' else ''
exit_cmd = '\nexit' if config == '' else ''
command = '{}link state track {}{}'.format(config, track_group_name, exit_cmd)
return st.config(dut=dut,cmd=command, type="klish", conf=True)
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
if config == 'yes':
url = rest_urls['config_link_track']
payload = {"openconfig-lst-ext:lst-group":[{"name":track_group_name}]}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: Config Link track group Failed')
return False
elif config == 'no':
url = rest_urls['delete_link_track'].format(track_group_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Delete Link track group Failed')
return False
return
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
def update_linktrack_interface(dut, track_group_name, upinterface, timeout, config='yes', **kwargs):
'''
purpose:
This definition is used to update link track interface
Arguments:
:param dut: device to be configured
:type dut: string
:param track_group_name: interface track group name name to be created or deleted
:param upinterface: upstream interface to be added or removed
:param config: it takes value as 'yes' or 'no' to configure or remove interface link tracking
    :param timeout: timeout value to be configured
    :param description: description to be set or removed
    :param downinterface: downstream interface to be added or removed
:type config: string
:return: None
usage:
update_linktrack_interface(dut1, "Ethernet0,Vlan10","10")
update_linktrack_interface(dut1, "Ethernet0,Vlan10","",config='no')
Created by: Gangadhara <<EMAIL>>
'''
cli_type = kwargs.get('cli_type', st.get_ui_type(dut,**kwargs))
description = kwargs.get('description', '')
downinterface = kwargs.get('downinterface', 'all-mclag')
if cli_type == 'click':
if config=='yes':
if description == '':
command = "config linktrack update {} --upstream {} --downstream {} --timeout {}".format(track_group_name,upinterface,downinterface,timeout)
else:
command = "config linktrack update {} --upstream {} --downstream {} --timeout {} --description {}".format(track_group_name,upinterface,downinterface,timeout,description)
else:
command = "config linktrack update {} -nu {} -nd {}".format(track_group_name,upinterface,downinterface)
return st.config(dut=dut,cmd=command)
elif cli_type == 'klish':
config = 'no ' if config != 'yes' else ''
command = 'link state track {}'.format(track_group_name)
intf = get_interface_number_from_name(upinterface)
dintf = get_interface_number_from_name(downinterface)
if config == '':
if downinterface == 'all-mclag':
command = command + "\n" + "downstream {}".format(downinterface)
if timeout != '':
command = command + "\n" + "timeout {}".format(timeout)
if description != '':
command = command + "\n" + "description {}".format(description)
command = command + "\n" + "exit"
command = command + "\n" + "interface {} {}".format(intf["type"], intf["number"])
command = command + "\n" + "link state track {} upstream".format(track_group_name)
command = command + "\n" + "exit"
if downinterface != 'all-mclag':
command = command + "\n" + "interface {} {}".format(dintf["type"], dintf["number"])
command = command + "\n" + "link state track {} downstream".format(track_group_name)
command = command + "\n" + "exit"
else:
if downinterface == 'all-mclag':
command = command + "\n" + "{}downstream {}".format(config,downinterface)
if timeout != '':
command = command + "\n" + "{}timeout".format(config)
if description != '':
command = command + "\n" + "{}description".format(config)
command = command + "\n" + "exit"
command = command + "\n" + "interface {} {}".format(intf["type"], intf["number"])
command = command + "\n" + "{}link state track {} upstream".format(config, track_group_name)
command = command + "\n" + "exit"
if downinterface != 'all-mclag':
command = command + "\n" + "interface {} {}".format(dintf["type"], dintf["number"])
command = command + "\n" + "{}link state track {} downstream".format(config,track_group_name)
command = command + "\n" + "exit"
return st.config(dut, command, type="klish", conf=True)
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
if config == 'yes':
url = rest_urls['config_link_track_params'].format(track_group_name)
#up_intf = get_interface_number_from_name(upinterface)
#dw_intf = get_interface_number_from_name(downinterface)
payload = {"openconfig-lst-ext:config":
{"name": track_group_name}
}
if downinterface == 'all-mclag':
payload["openconfig-lst-ext:config"].update({'all-mclags-downstream':True})
if timeout != '':
payload["openconfig-lst-ext:config"].update({'timeout':int(timeout)})
if description != '':
payload["openconfig-lst-ext:config"].update({'description':description})
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: Config Link track group parameters Failed')
return False
if downinterface != 'all-mclag':
url = rest_urls['add_rem_link_track_downstream'].format(downinterface)
payload = { "openconfig-lst-ext:group-name": track_group_name }
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: Assign Link track to downstream interface Failed')
return False
url = rest_urls['add_rem_link_track_upstream'].format(upinterface)
payload = {"openconfig-lst-ext:upstream-groups":
{"upstream-group":
[{"group-name":track_group_name,
"config":{"group-name":track_group_name}
}]
}
}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: Assign Link track to upstream interface Failed')
return False
elif config == 'no':
url = rest_urls['add_rem_link_track_upstream'].format(upinterface)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Remove Link track from upstream interface Failed')
return False
if downinterface == 'all-mclag':
url = rest_urls['link_track_all_mclag'].format(track_group_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Remove all-mclag downstream Failed')
return False
else:
                url = rest_urls['add_rem_link_track_downstream'].format(downinterface)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Remove Link track from downstream interface Failed')
return False
if timeout != '':
url = rest_urls['link_track_timeout'].format(track_group_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Remove Link track group timeout Failed')
return False
if description != '':
url = rest_urls['link_track_description'].format(track_group_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: Remove Link track group description Failed')
return False
return
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
def verify_mac(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_mac(dut=dut1,mac="00:11:00:22:00:11",total="1")
To verify mac
:param dut:
:param macaddress:
:param vlan:
:param port:
:param type:
:param dest_ip:
:param total:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if cli_type == "click":
cmd = "show mac"
elif cli_type == "klish":
cmd = "show mac address-table"
output = st.show(dut,cmd,type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
if cli_type == "klish":
if 'type' in kwargs:
if not kwargs['type'].isupper():
kwargs['type']=kwargs['type'].upper()
count = 0
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
#Converting all kwargs to list type to handle single or list of instances
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def _clear_vxlan_config_helper(dut_list, cli_type='click'):
"""
Helper routine to cleanup vxlan config from devices.
"""
dut_li = list(dut_list) if isinstance(dut_list, list) else [dut_list]
for dut in dut_li:
st.log("############## {} : VxLAN Config Cleanup ################".format(dut))
output = st.show(dut, "show vxlan interface")
st.log("##### VXLAN INTERFACE: {}".format(output))
if len(output) == 0:
continue
entry = output[0]
if entry['vtep_name']:
vtep_name = entry['vtep_name']
nvo_name = entry['nvo_name']
vrf_vni = st.show(dut, "show vxlan vrfvnimap", type=cli_type)
st.log("##### [{}] VXLAN VRF L3VNI MAP: {}".format(dut, vrf_vni))
for entry in vrf_vni:
if not entry['vrf'] or not entry['vni'] or entry['vni'] == '0':
continue
vrf = entry['vrf']
map_vrf_vni(dut, vrf, entry['vni'], config="no")
vlan_vni = st.show(dut, "show vxlan vlanvnimap", type=cli_type)
st.log("##### [{}] VXLAN VLAN VNI MAP: {}".format(dut, vlan_vni))
for entry in vlan_vni:
if not entry['vlan'] or not entry['vni']:
continue
vlan = entry['vlan']
if vlan[:4] == "Vlan":
vlan = vlan[4:]
map_vlan_vni(dut, vtep_name, vlan, entry['vni'], config='no', cli_type=cli_type)
if nvo_name:
create_evpn_instance(dut, nvo_name, vtep_name, config='no', cli_type=cli_type)
create_overlay_intf(dut, vtep_name, '0.0.0.0', config='no', cli_type=cli_type)
return True
def clear_vxlan_configuration(dut_list, thread=True, cli_type='click'):
"""
Find and cleanup all vxlan configuration.
:param dut_list
:return:
"""
dut_li = list(dut_list) if isinstance(dut_list, list) else [dut_list]
[out, exceptions] = utils.exec_foreach(thread, dut_li, _clear_vxlan_config_helper, cli_type)
st.log(exceptions)
return False if False in out else True
def parse_rest_output_linktrack_summary(response):
lst_group = response['output']['openconfig-lst-ext:state']
    lst_dict = {}
    lst_dict['timeout'] = str(lst_group.get('timeout',""))
    lst_dict['name'] = lst_group.get('name',"")
    lst_dict['description'] = lst_group.get('description',"")
    return [lst_dict]
def verify_linktrack_summary(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_linktrack_summary(dut=dut1,name="group1",description="MLAG_LINK_TRACK",timeout="10")
To verify linktrack summary
:param dut:
:param name:
:param description:
:param timeout:
:return: True or False
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if 'name' not in kwargs:
st.error("Mandatory arg name is not present")
return False
if cli_type == 'click':
result = st.show(dut,"show linktrack summary")
elif cli_type == 'klish':
result = st.show(dut, 'show link state tracking', type='klish')
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls['get_link_track_summary'].format(kwargs['name'])
response = get_rest(dut,rest_url=url)
st.log('KLISH output for debugging REST')
st.show(dut, 'show link state tracking', type='klish')
if response['output']:
result = parse_rest_output_linktrack_summary(response)
else:
st.error("OCYANG-FAIL: verify link track summary - Get Response is empty")
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
if len(result) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
ret_val = False
for rlist in result:
count = 0
for key in kwargs:
if rlist[key] == kwargs[key]:
count = count + 1
if len(kwargs) == count:
ret_val = True
for key in kwargs:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
break
else:
for key in kwargs:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
if ret_val is False:
st.log("Fail: Not Matched all args in passed dict {} from parsed dict".format(kwargs))
return ret_val
def parse_rest_output_linktrack_group(dut,response,timeout,description='',lst_bringup_time='0'):
lst_interfaces = response['output']['openconfig-lst-ext:interface']
result = []
for interface in lst_interfaces:
lst_dict = {}
lst_dict['description'] = description
lst_dict['timeout'] = timeout
lst_dict['startup_remain_time'] = lst_bringup_time
if 'upstream-groups' in interface.keys():
lst_dict['name'] = interface.get('upstream-groups',{}).get('upstream-group',[])[0].get('group-name',"")
lst_dict['direction'] = "Upstream"
lst_dict['interface'] = interface.get('id',"")
port1.get_interface_status(dut, lst_dict['interface'],cli_type='click')
interface_state = str(port1.get_interface_status(dut,lst_dict['interface']))
st.log("DEBUG==>Interface:{}, Inf state from Rest:{}".format(lst_dict['interface'],interface_state))
if interface_state.lower() == 'up':
lst_dict['direction_state'] = 'Up'
elif interface_state.lower() == 'down':
lst_dict['direction_state'] = 'Down'
else:
lst_dict['direction_state'] = interface_state
elif 'downstream-group' in interface.keys():
lst_dict['name'] = interface.get('downstream-group',{}).get('state',{}).get('group-name',"")
lst_dict['direction'] = "Downstream"
lst_dict['interface'] = interface.get('id',"")
if interface.get('downstream-group',{}).get('state',{}).get('disabled',""):
lst_dict['direction_state'] = 'Disabled'
else:
port1.get_interface_status(dut, lst_dict['interface'], cli_type='click')
interface_state = str(port1.get_interface_status(dut, lst_dict['interface']))
st.log("DEBUG==>Interface:{}, Inf state from Rest:{}".format(lst_dict['interface'], interface_state))
if interface_state.lower() == 'up':
lst_dict['direction_state'] = 'Up'
elif interface_state.lower() == 'down':
lst_dict['direction_state'] = 'Down'
else:
lst_dict['direction_state'] = interface_state
result.append(lst_dict)
return result
def verify_linktrack_group_name(dut,**kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_linktrack_group_name(dut=dut1,name="group1",description="MLAG_LINK_TRACK",timeout="10",
upstream_plist=["Ethernet3","Ethernet9"],downstream_plist=["PortChannel10"])
To verify linktrack group <group-name>
:param dut:
:param name:
:param description:
:param timeout:
:param upstream_plist: List of upstream interfaces
:param downstream_plist: List of downstream portchannels
:return: True or False
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if 'name' not in kwargs:
st.error("Mandatory arg name is not present")
return False
if cli_type == 'click':
output = st.show(dut,"show linktrack group {}".format(kwargs['name']))
elif cli_type == 'klish':
output = st.show(dut,"show link state tracking {}".format(kwargs['name']), type='klish')
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
lst_name = kwargs['name']
url = rest_urls['get_link_track_description'].format(lst_name)
lst_description = get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:description']
url = rest_urls['get_link_track_timeout'].format(lst_name)
lst_timeout = str(get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:timeout'])
url = rest_urls['get_link_track_bringup_remain_time'].format(lst_name)
lst_bringup_time = str(get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:bringup-remaining-time'])
url = rest_urls['get_link_track_interfaces']
response = get_rest(dut, rest_url=url)
st.log('KLISH output for debugging REST')
st.show(dut, 'show link state tracking {}'.format(kwargs['name']), type='klish')
if response['output']:
output = parse_rest_output_linktrack_group(dut,response,lst_timeout,lst_description,lst_bringup_time)
else:
st.error("OCYANG-FAIL: verify link track group - Get Response is empty")
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
if len(output) == 0:
st.error("Output is Empty")
return False
if "return_output" in kwargs:
return True
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['name','description','timeout','startup_remain_time']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
if len(kwargs.keys()) > 0:
#Converting all kwargs to list type to handle single or list of instances
input_dict_list =[]
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
#convert kwargs into list of dictionary
        for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def get_port_counters(dut, port, counter,**kwargs):
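    """
    Clear interface counters, wait briefly and return the requested rate counter for each port.
    :param dut: device under test
    :param port: single interface name or list of interfaces
    :param counter: counter field(s) to fetch (e.g. 'rx_bps'), paired positionally with port
    :return: list of dicts, one per port; under klish the value is normalized to a
             "<value> MB/s" or "<value> KB/s" string
    """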
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
port1.clear_interface_counters(dut,cli_type=cli_type)
port_range_list = list(port) if isinstance(port, list) else [port]
cntr_range_list = list(counter) if isinstance(counter, list) else [counter]
if cli_type == 'click':
st.wait(3)
else:
st.wait(1)
list1 = []
for prt, cntr in zip (port_range_list, cntr_range_list):
if cli_type == "click":
if '/' in prt:
prt = st.get_other_names(dut,[prt])[0]
st.show(dut, "show interface counters -i {}".format(prt),type=cli_type)
output = st.show(dut, "show interface counters -i {}".format(prt),type=cli_type)
entries = filter_and_select(output, (cntr,), {'iface': prt})
list1.append(entries[0])
if cli_type == "klish":
output = port1.get_interface_counters_all(dut,port=prt,cli_type=cli_type)
entries = filter_and_select(output, (cntr,), {'iface': prt})
if output == [] or entries == []:
st.log("interface {} is not found in the show interface counters O/P".format(prt))
dict1 = {}
dict1.update({"rx_bps":"0.0 KB/s"})
list1.append(dict1)
else:
if float(entries[0][cntr]) >= 1.0:
value = float(entries[0][cntr])
entries[0][cntr] = str(value) + " MB/s"
elif float(entries[0][cntr]) < 1.0 and float(entries[0][cntr]) >= 0.001:
value = float(entries[0][cntr])*1024
entries[0][cntr] = str(value) + " KB/s"
elif float(entries[0][cntr]) < 0.001:
entries[0][cntr] = get_port_rate_inklish(dut,prt=prt,cntr=cntr)
list1.append(entries[0])
return list1
def get_port_rate_inklish(dut,prt,cntr):
for i in range(4):
st.wait(5,"\n\n###### Retry attempt {} for interface {} {} check #### \n".format(i,prt,cntr))
output = port1.get_interface_counters_all(dut,port=prt,cli_type="klish")
entries = filter_and_select(output, (cntr,), {'iface': prt})
st.log("\n\n###### interface {} {} shows {} #####\n".format(prt,cntr,float(entries[0][cntr])))
if output == [] or entries == []:
st.log("interface {} is not found in the show interface counters O/P".format(prt))
return "0.0 B/s"
else:
if float(entries[0][cntr]) >= 1.0:
value = float(entries[0][cntr])
entries[0][cntr] = str(value) + " MB/s"
return str(value) + " MB/s"
elif float(entries[0][cntr]) < 1.0 and float(entries[0][cntr]) >= 0.001:
value = float(entries[0][cntr])*1024
entries[0][cntr] = str(value) + " KB/s"
return str(value) + " KB/s"
elif float(entries[0][cntr]) < 0.001:
continue
return "0.0 B/s"
def neigh_suppress_config(dut, vlan, config='yes', skip_error=False, cli_type=''):
"""
purpose:
        This API is used to enable or disable neighbor suppression on a VLAN
Arguments:
:param dut: device to be configured
:type dut: string
:param vlan: VLAN name
:type vlan: string
    :param config: 'yes' to enable or 'no' to disable neighbor suppression
:return: None
usage:
neigh_suppress_config(dut1, "Vlan100", config="yes", cli_type='click')
neigh_suppress_config(dut1, "Vlan100", config="yes", cli_type='klish')
neigh_suppress_config(dut1, "Vlan100", config="no", cli_type='click')
Created by: Ganagadhar <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,cli_type=cli_type)
if config == 'yes':
conf_str = ''
action = 'enable'
else:
conf_str = 'no'
action = 'disable'
if cli_type == 'click':
command = "config neigh_suppress {} {}".format(action,vlan)
elif cli_type == 'klish':
command = []
command.append('interface Vlan {}'.format(vlan))
command.append('{} neigh-suppress'.format(conf_str))
command.append('exit')
elif cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
vlan_data = str(vlan) if type(vlan) is not str else vlan
vlan_str = 'Vlan' + vlan_data if 'Vlan' not in vlan_data else vlan_data
payload = {"openconfig-vxlan:config":{
"arp-and-nd-suppress":"ENABLE"}
}
if config == 'yes':
url = rest_urls['vxlan_arp_nd_suppress'].format(vlan_str)
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: ARP and ND suppress config on vlan Failed')
return False
else:
url = rest_urls['vxlan_arp_nd_suppress_delete'].format(vlan_str)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: ARP and ND suppress UnConfig on vlan Failed')
return False
return
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
st.debug(command)
return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
def verify_neigh_suppress(dut, **kwargs):
"""
Author: <NAME> (<EMAIL>)
verify_neigh_suppress(dut=dut1,identifier="all",cli_type="click",vlan="Vlan450",status="Configured",netdevice="vtepLeaf4-450")
verify_neigh_suppress(dut=dut1,identifier="450",cli_type="click",vlan="Vlan450",status="Configured",netdevice="vtepLeaf4-450")
verify_neigh_suppress(dut=dut1,identifier="all",cli_type="click",vlan="Vlan100",
status="Not Configured",netdevice="vtepLeaf4-100")
verify_neigh_suppress(dut=dut1,identifier="450",cli_type="klish",vlan="Vlan450",status="on")
To verify neighbour suppress for <vlan|all>
:param dut:
:param total_count:
:param identifier: all | specific vlan id
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if 'identifier' not in kwargs:
st.error("Mandetory arg identifier is not present")
return False
cmd = 'neigh-suppress' if cli_type == 'click' else 'neighbor-suppress-status'
command = 'show {}'.format(cmd)
if kwargs['identifier'] == "all" and cli_type == 'click':
command += " all"
elif kwargs['identifier'] != "all" and cli_type == 'click':
command += " vlan {}".format(kwargs['identifier'])
elif kwargs['identifier'] != "all" and cli_type == 'klish':
command += " {}".format(kwargs['identifier'])
elif kwargs['identifier'] == "all" and cli_type == 'klish':
command += ""
elif cli_type in ['rest-put','rest-patch']:
st.log('KLISH output for debugging REST')
output = st.show(dut, 'show neighbor-suppress-status', type='klish')
### URI to be used only if neighbor-suppression is enabled for the VLAN -SONIC-31990
## So When expected status is off, verification wil continue based on above klish output
if kwargs['status'].lower() == 'on':
rest_urls = st.get_datastore(dut, "rest_urls")
vlan = kwargs['vlan']
vlan_data = str(vlan) if type(vlan) is not str else vlan
vlan_str = 'Vlan' + vlan_data if 'Vlan' not in vlan_data else vlan_data
url = rest_urls['vxlan_arp_nd_suppress_state'].format(vlan_str)
response = get_rest(dut,rest_url=url)
output = {}
if response['output']:
output['vlan'] = vlan_str
if response.get('output',{}).get('openconfig-vxlan:arp-and-nd-suppress',"") == "ENABLE":
output['status'] ='on'
elif response.get('output',{}).get('openconfig-vxlan:arp-and-nd-suppress',"") == "DISABLE":
output['status'] ='off'
output = [output]
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
if cli_type not in ['rest-put', 'rest-patch']:
output = st.show(dut, command, type=cli_type)
if len(output) == 0:
st.error("Output is Empty")
return False
del kwargs['identifier']
no_common_key = 0
ret_val1 = False
dict1 = {}
common_key_list = ['total_count']
for key in kwargs:
if key in common_key_list:
no_common_key = no_common_key + 1
if no_common_key > 0:
rlist = output[0]
count = 0
for key in kwargs:
if rlist[key] == kwargs[key] and key in common_key_list:
count = count + 1
if no_common_key == count:
ret_val1 = True
for key in kwargs:
if key in common_key_list:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
for key in kwargs:
if key in common_key_list:
if rlist[key] == kwargs[key]:
st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
else:
st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
st.log("\n")
for key in common_key_list:
if key in kwargs:
dict1[key] = kwargs[key]
del kwargs[key]
if no_common_key > 0 and ret_val1 is False:
st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
return ret_val1
ret_val = "True"
for key in kwargs:
if type(kwargs[key]) is list:
kwargs[key] = list(kwargs[key])
else:
kwargs[key] = [kwargs[key]]
input_dict_list =[]
    for i in range(len(kwargs[list(kwargs.keys())[0]])):
temp_dict = {}
for key in kwargs.keys():
temp_dict[key] = kwargs[key][i]
input_dict_list.append(temp_dict)
for input_dict in input_dict_list:
entries = filter_and_select(output,None,match=input_dict)
if entries:
st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
else:
st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
ret_val = False
return ret_val
def show_mclag_uniqueip(dut, **kwargs):
"""
API to display the mclag unique ip
:param dut:
:param cli_type:
:param mclag_id:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if cli_type == "klish":
command = "show mclag separate-ip-interfaces"
elif cli_type == "click":
command = "mclagdctl -i {} dump unique_ip".format(kwargs['mclag_id'])
st.show(dut, command,skip_tmpl=True,type=cli_type)
def show_ip_neigh(dut, **kwargs):
"""
API to display ip neighbor
:param dut:
:param cli_type:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
if cli_type == "klish":
command1 = "show ip arp"
command2 = "show ipv6 neighbors"
st.show(dut, command1,skip_tmpl=True,type=cli_type)
st.show(dut, command2,skip_tmpl=True,type=cli_type)
elif cli_type == "click":
command = "ip neigh show"
st.show(dut, command,skip_tmpl=True,type=cli_type)
def get_tunnel_list(dut,**kwargs):
"""
    API to return the list of tunnels present in the DUT
:param dut:
:param cli_type:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
res=st.show(dut, 'show vxlan tunnel',type=cli_type)
tun_lst=[]
for l1 in res:
tun_lst.append(l1['rem_vtep'])
return tun_lst
def config_vxlan_qos_mode(dut, vtep_name,**kwargs):
"""
purpose:
This definition is used to configure VxLAN QOS mode
Arguments:
:param dut: device to be configured
:type dut: string
:param vtep_name: VTEP name to be created
:type vtep_name: string
:param : cli_type
:type cli_type: string
:param kwargs["qos_mode"]: qos mode to be configured either uniform/pipe
:type kwargs["qos_mode"]: dict
:param kwargs["pipe_dscp"]: dscp value to be set for PIPE mode
:type kwargs["pipe_dscp"]: dict
:return: None
usage:
config_vxlan_qos_mode(dut1, "dut1VTEP", qos_mode="uniform")
config_vxlan_qos_mode(dut1, "dut1VTEP", qos_mode="pipe",pipe_dscp="10")
    Created by: Julius <<EMAIL>>
"""
cli_type = st.get_ui_type(dut,**kwargs)
qosMode = kwargs.get("qos_mode", "pipe dscp 0")
if cli_type == "klish":
command = []
command.append('interface vxlan {}'.format(vtep_name))
if qosMode == "pipe" and "pipe_dscp" in kwargs:
command.append("qos-mode pipe dscp {}".format(kwargs["pipe_dscp"]))
elif qosMode == "uniform":
command.append("qos-mode uniform")
else:
command.append("qos-mode {}".format(qosMode))
command.append('exit')
return st.config(dut, command, type=cli_type)
elif cli_type == "rest-put":
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["config_vxlan_qos"]
if qosMode == "uniform":
payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
"type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "UNIFORM"}}}]}
elif qosMode == "pipe" and "pipe_dscp" in kwargs:
payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
"type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "PIPE",
"dscp" : int(kwargs["pipe_dscp"])}}}]}
else:
payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
"type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "PIPE",
"dscp" : 0}}}]}
return config_rest(dut, http_method='post', rest_url=url, json_data=payload,timeout=10)
elif cli_type == "rest-patch":
rest_urls = st.get_datastore(dut, "rest_urls")
url = rest_urls["modify_vxlan_qos"].format(vtep_name)
if qosMode == "uniform":
payload = {"openconfig-vxlan:config":{"qos-mode": qosMode.upper()}}
elif qosMode == "pipe" and "pipe_dscp" in kwargs:
payload = {"openconfig-vxlan:config": {"qos-mode": qosMode.upper(),"dscp": int(kwargs["pipe_dscp"])}}
else:
payload = {"openconfig-vxlan:config": {"qos-mode": qosMode.upper(),"dscp": 0}}
return config_rest(dut, http_method='patch', rest_url=url, json_data=payload,timeout=10)
else:
st.error("Nothing configured for this UI-TYPE {}").format(cli_type)
return False
def verify_vxlan_qos_mode(dut, vtep_name, qos_mode,**kwargs):
'''
purpose:
This definition is used to verify VxLAN interface QOS mode
Arguments:
:param dut: Device name where the command to be executed
:type dut: string
:param vtep_name: vtep name to be verified
:type vtep_name: string
:param qos_mode: qos mode name to be verified
:type qos_mode: string
:param kwargs["pipe_dscp"]:PIPE DSCP value to be verified
:type kwargs["pipe_dscp"]: dict
:return: True/False True - success case; False - Failure case
usage: verify_vxlan_qos_mode(dut1,qos_mode="uniform")
verify_vxlan_qos_mode(dut1,qos_mode="pipe",pipe_dscp=10)
    Created by: Julius <<EMAIL>>
'''
success = True
cli_type = st.get_ui_type(dut, **kwargs)
if cli_type == "klish":
if qos_mode == "pipe" and "pipe_dscp" in kwargs:
verify_dict = {"qos_mode" : "pipe", "pipe_dscp" : kwargs["pipe_dscp"]}
elif qos_mode == "uniform":
verify_dict = {"qos_mode" : "uniform"}
else:
verify_dict = {"qos_mode" : "pipe", "pipe_dscp" : "0"}
cli_out = st.show(dut, 'show vxlan interface', type=cli_type)
fil_out = filter_and_select(cli_out, verify_dict.keys(), {"vtep_name": vtep_name})
if len(fil_out) == 0:
st.error("QOS details {} not found in show output".format(verify_dict.keys()))
return False
else:
dut_out = fil_out[0]
for key in verify_dict.keys():
if dut_out[key] == verify_dict[key]:
st.log("Match found for key {}; expected val: {} and "
"obtained val: {}".format(key, verify_dict[key], dut_out[key]))
else:
st.error("Match NOT found for key {}; expected val: {} but "
"obtained val: {}".format(key, verify_dict[key], dut_out[key]))
success = False
elif cli_type in ["rest-put", "rest-patch"]:
rest_urls = st.get_datastore(dut, "rest_urls")
if "pipe_dscp" not in kwargs:
url = rest_urls["get_vxlan_qos_mode"].format(vtep_name)
rest_out = get_rest(dut,rest_url=url,timeout=30)
if rest_out["status"] == 200:
if qos_mode == rest_out["output"]["openconfig-vxlan:qos-mode"].lower():
st.log("Match found for QOS mode; expected val: {} and "
"obtained val: {}".format(qos_mode,
rest_out["output"]["openconfig-vxlan:qos-mode"].lower()))
else:
st.error("Match NOT found for QOS mode; expected val: {} "
"but got: {}".format(qos_mode,
rest_out["output"]["openconfig-vxlan:qos-mode"].lower()))
success = False
else:
st.error("VxLAN QOS mode value NOT found in rest output")
return False
else:
url = rest_urls["get_vxlan_qos_pipe_val"].format(vtep_name)
rest_out = get_rest(dut,rest_url=url,timeout=30)
if rest_out["status"] == 200:
if int(kwargs["pipe_dscp"]) == rest_out["output"]["openconfig-vxlan:dscp"]:
st.log("Match found for PIPE DSCP; expected val: {} and "
"obtained val: {}".format(int(kwargs["pipe_dscp"]),
rest_out["output"]["openconfig-vxlan:dscp"]))
else:
st.error("Match NOT found for PIPE DSCP; expected val: {} "
"but got: {}".format(int(kwargs["pipe_dscp"]),
rest_out["output"]["openconfig-vxlan:dscp"]))
success = False
else:
st.error("PIPE DSCP value not found in rest output")
return False
return success
|
mujoco/vtrace_main.py
|
mrbermell/seed_rl
| 733 |
137219
|
# coding=utf-8
# Copyright 2019 The SEED Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""VTrace (IMPALA) example for Mujoco.
Warning!!! This code uses DeepMind wrappers which differ from OpenAI gym
wrappers and the results may not be comparable.
"""
from absl import app
from absl import flags
from seed_rl.agents.vtrace import learner
from seed_rl.agents.vtrace import networks
from seed_rl.common import actor
from seed_rl.common import common_flags
from seed_rl.common import normalizer
from seed_rl.mujoco import env
import tensorflow as tf
# Optimizer settings.
flags.DEFINE_float('learning_rate', 3e-4, 'Learning rate.')
# Network settings.
flags.DEFINE_integer('n_mlp_layers', 2, 'Number of MLP hidden layers.')
flags.DEFINE_integer('mlp_size', 64, 'Sizes of each of MLP hidden layer.')
flags.DEFINE_integer(
'n_lstm_layers', 0,
    'Number of LSTM layers. LSTM layers are applied after MLP layers.')
flags.DEFINE_integer('lstm_size', 64, 'Sizes of each LSTM layer.')
flags.DEFINE_bool('normalize_observations', False, 'Whether to normalize '
'observations by subtracting mean and dividing by stddev.')
# Environment settings.
flags.DEFINE_string('env_name', 'HalfCheetah-v2',
'Name of the environment from OpenAI Gym.')
flags.DEFINE_enum(
'discretization', 'none', ['none', 'lin', 'log'], 'Values other than '
'"none" cause action coordinates to be discretized into n_actions_per_dim '
'buckets. Buckets are spaced linearly between the bounds if "lin" mode is '
'used and logarithmically for "log" mode.')
flags.DEFINE_integer(
'n_actions_per_dim', 11, 'The number of buckets per action coordinate if '
'discretization is used.')
flags.DEFINE_float(
'action_ratio', 30.,
'The ratio between the highest and the lowest positive '
'action for logarithmic action discretization.')
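# Illustrative note (not part of the original flags): with discretization='lin',
# n_actions_per_dim=11 and an assumed action range of [-1, 1], each action coordinate is mapped to
# one of 11 evenly spaced buckets: -1.0, -0.8, ..., 1.0.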
FLAGS = flags.FLAGS
def create_agent(unused_action_space, unused_env_observation_space,
parametric_action_distribution):
policy = networks.MLPandLSTM(
parametric_action_distribution,
mlp_sizes=[FLAGS.mlp_size] * FLAGS.n_mlp_layers,
lstm_sizes=[FLAGS.lstm_size] * FLAGS.n_lstm_layers)
if FLAGS.normalize_observations:
policy = normalizer.NormalizeObservationsWrapper(policy,
normalizer.Normalizer())
return policy
def create_optimizer(unused_final_iteration):
learning_rate_fn = lambda iteration: FLAGS.learning_rate
optimizer = tf.keras.optimizers.Adam(FLAGS.learning_rate)
return optimizer, learning_rate_fn
def main(argv):
create_environment = lambda task, config: env.create_environment(
env_name=config.env_name,
discretization=config.discretization,
n_actions_per_dim=config.n_actions_per_dim,
action_ratio=config.action_ratio)
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
if FLAGS.run_mode == 'actor':
actor.actor_loop(create_environment)
elif FLAGS.run_mode == 'learner':
learner.learner_loop(create_environment,
create_agent,
create_optimizer)
else:
raise ValueError('Unsupported run mode {}'.format(FLAGS.run_mode))
if __name__ == '__main__':
app.run(main)
|
bcbio/cwl/main.py
|
a113n/bcbio-nextgen
| 418 |
137228
|
"""Main entry point for running preparation of CWL inputs.
"""
from bcbio.pipeline import run_info
from bcbio.cwl import create
def run(args):
"""Run a CWL preparation pipeline.
"""
dirs, config, run_info_yaml = run_info.prep_system(args.sample_config, args.systemconfig)
integrations = args.integrations if hasattr(args, "integrations") else {}
world = run_info.organize(dirs, config, run_info_yaml, is_cwl=True, integrations=integrations)
create.from_world(world, run_info_yaml, integrations=integrations, add_container_tag=args.add_container_tag)
|
tools/bin/ext/figleaf/internals.py
|
YangHao666666/hawq
| 450 |
137232
|
<gh_stars>100-1000
"""
Coverage tracking internals.
"""
import sys
import threading
err = sys.stderr
import types, symbol
# use builtin sets if in >= 2.4, otherwise use 'sets' module.
try:
set()
except NameError:
from sets import Set as set
def get_interesting_lines(code):
"""
Count 'interesting' lines of Python in a code object, where
'interesting' is defined as 'lines that could possibly be
executed'.
    This is done by disassembling the code object and returning
line numbers.
"""
# clean up weird end-of-file issues
lines = set([ l for (o, l) in findlinestarts(code) ])
for const in code.co_consts:
if type(const) == types.CodeType:
lines.update(get_interesting_lines(const))
return lines
def findlinestarts(code):
"""Find the offsets in a byte code which are start of lines in the source.
Generate pairs (offset, lineno) as described in Python/compile.c.
CTB -- swiped from Python 2.5, module 'dis', so that earlier versions
of Python could use the function, too.
"""
byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
line_increments = [ord(c) for c in code.co_lnotab[1::2]]
lastlineno = None
lineno = code.co_firstlineno
addr = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if lineno != lastlineno:
yield (addr, lineno)
lastlineno = lineno
addr += byte_incr
lineno += line_incr
if lineno != lastlineno:
yield (addr, lineno)
class CodeTracer:
"""
Basic mechanisms for code coverage tracking, using sys.settrace.
"""
def __init__(self, exclude_prefix, include_only_prefix):
self.common = self.c = set()
self.section_name = None
self.sections = {}
self.started = False
assert not (exclude_prefix and include_only_prefix), \
"mutually exclusive"
self.excl = exclude_prefix
self.incl = include_only_prefix
def start(self):
"""
Start recording.
"""
if not self.started:
self.started = True
if self.excl and not self.incl:
global_trace_fn = self.g1
elif self.incl and not self.excl:
global_trace_fn = self.g2
else:
global_trace_fn = self.g0
sys.settrace(global_trace_fn)
if hasattr(threading, 'settrace'):
threading.settrace(global_trace_fn)
def stop(self):
if self.started:
sys.settrace(None)
if hasattr(threading, 'settrace'):
threading.settrace(None)
self.started = False
self.stop_section()
def g0(self, f, e, a):
"""
global trace function, no exclude/include info.
f == frame, e == event, a == arg .
"""
if e == 'call':
return self.t
def g1(self, f, e, a):
"""
global trace function like g0, but ignores files starting with
'self.excl'.
"""
if e == 'call':
excl = self.excl
path = f.f_globals.get('__file__')
if path is None:
path = f.f_code.co_filename
if excl and path.startswith(excl):
return
return self.t
def g2(self, f, e, a):
"""
global trace function like g0, but only records files starting with
'self.incl'.
"""
if e == 'call':
incl = self.incl
if incl and f.f_code.co_filename.startswith(incl):
return self.t
def t(self, f, e, a):
"""
local trace function.
"""
        if e == 'line':
self.c.add((f.f_code.co_filename, f.f_lineno))
return self.t
def clear(self):
"""
wipe out coverage info
"""
        self.c = set()
def start_section(self, name):
self.stop_section()
self.section_name = name
self.c = self.sections.get(name, set())
def stop_section(self):
if self.section_name:
self.sections[self.section_name] = self.c
self.section_name = None
self.c = self.common
class CoverageData:
"""
A class to manipulate and combine data from the CodeTracer object.
In general, do not pickle this object; it's simpler and more
straightforward to just pass the basic Python objects around
(e.g. CoverageData.common, a set, and CoverageData.sections, a
dictionary of sets).
"""
def __init__(self, trace_obj=None):
self.common = set()
self.sections = {}
if trace_obj:
self.update(trace_obj)
def update(self, trace_obj):
# transfer common-block code coverage -- if no sections are set,
# this will be all of the code coverage info.
self.common.update(trace_obj.common)
# update our internal section dictionary with the (filename, line_no)
# pairs from the section coverage as well.
for section_name, section_d in trace_obj.sections.items():
section_set = self.sections.get(section_name, set())
section_set.update(section_d)
self.sections[section_name] = section_set
def gather_files(self, name=None):
"""
Return the dictionary of lines of executed code; the dict
keys are filenames and values are sets containing individual
(integer) line numbers.
'name', if set, is the desired section name from which to gather
coverage info.
"""
cov = set()
cov.update(self.common)
if name is None:
for section_name, coverage_set in self.sections.items():
cov.update(coverage_set)
else:
coverage_set = self.sections.get(name, set())
cov.update(coverage_set)
# cov = list(cov)
# cov.sort()
files = {}
for (filename, line) in cov: # @CTB could optimize
d = files.get(filename, set())
d.add(line)
files[filename] = d
return files
def gather_sections(self, file):
"""
Return a dictionary of sets containing section coverage information for
a specific file. Dict keys are sections, and the dict values are
sets containing (integer) line numbers.
"""
sections = {}
for k, c in self.sections.items():
s = set()
            for (filename, line) in c:
if filename == file:
s.add(line)
sections[k] = s
return sections
|
tests/sample_getcontextlogs.py
|
topdown618/aliyun-log-python-sdk
| 130 |
137249
|
<gh_stars>100-1000
# encoding: utf-8
from __future__ import print_function
from aliyun.log import *
import time
import os
def extract_pack_info(log):
contents = log.get_contents()
return contents.get('__tag__:__pack_id__', None), contents.get('__pack_meta__', None)
# @log_enter_exit
def sample_get_context_logs(client, project, logstore):
    print('please make sure your logstore already has an index created')
time.sleep(3)
# Use get_logs and with_pack_meta to get pack information of the start log.
# Query time range: recent 15 minutes.
# The start log is the first log returned.
query = '* | with_pack_meta'
request = GetLogsRequest(project, logstore, int(time.time()) - 900, int(time.time()), '', query)
response = client.get_logs(request)
logs = response.get_logs()
if not logs:
print('no log is queried')
return
pack_id, pack_meta = extract_pack_info(logs[int(len(logs) / 2)])
if pack_id is None or pack_meta is None:
print('incomplete pack information, please make sure your logs are collected through logtail')
print('pack_id:', pack_id)
print('pack_meta:', pack_meta)
return
print('start log, pack_id:', pack_id, 'pack_meta:', pack_meta)
# Get context logs of the start log (both directions)
response = client.get_context_logs(project, logstore, pack_id, pack_meta, 30, 30)
print('total lines:', response.get_total_lines())
print('back lines:', response.get_back_lines())
print('forward lines:', response.get_forward_lines())
time.sleep(1)
logs = response.get_logs()
backward_start_log = logs[0]
forward_start_log = logs[-1]
# Get context logs backward.
log = backward_start_log
for _ in range(0, 3):
pack_id, pack_meta = extract_pack_info(log)
response = client.get_context_logs(project, logstore, pack_id, pack_meta, 10, 0)
print('backward log, pack_id:', pack_id, 'pack_meta:', pack_meta)
print('total lines:', response.get_total_lines())
print('back lines:', response.get_back_lines())
logs = response.get_logs()
if not logs:
break
log = logs[0]
time.sleep(1)
# Get context logs forward.
log = forward_start_log
for _ in range(0, 3):
pack_id, pack_meta = extract_pack_info(log)
response = client.get_context_logs(project, logstore, pack_id, pack_meta, 0, 10)
print('forward log, pack_id:', pack_id, 'pack_meta:', pack_meta)
print('total lines:', response.get_total_lines())
print('back lines:', response.get_back_lines())
logs = response.get_logs()
if not logs:
break
log = logs[-1]
time.sleep(1)
def main():
endpoint = os.environ.get('ALIYUN_LOG_SAMPLE_ENDPOINT', 'cn-hangzhou.log.aliyuncs.com')
access_key_id = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSID', '')
access_key = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSKEY', '')
logstore = os.environ.get('ALIYUN_LOG_SAMPLE_LOGSTORE', '')
project = os.environ.get('ALIYUN_LOG_SAMPLE_PROJECT', '')
token = ""
assert endpoint and access_key_id and access_key and project, ValueError("endpoint/access_id/key and "
"project cannot be empty")
client = LogClient(endpoint, access_key_id, access_key, token)
sample_get_context_logs(client, project, logstore)
if __name__ == '__main__':
main()
|
anaconda_project/internal/cli/activate.py
|
kathatherine/anaconda-project
| 188 |
137258
|
<reponame>kathatherine/anaconda-project
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2016, Anaconda, Inc. All rights reserved.
#
# Licensed under the terms of the BSD 3-Clause License.
# The full license is in the file LICENSE.txt, distributed with this software.
# -----------------------------------------------------------------------------
"""The ``activate`` command which prepares a project to run and prints commands to source in your shell."""
from __future__ import absolute_import, print_function
import os
try:
from shlex import quote # pragma: no cover (py3 only)
except ImportError: # pragma: no cover (py2 only)
from pipes import quote
from anaconda_project.internal.cli.prepare_with_mode import prepare_with_ui_mode_printing_errors
from anaconda_project.internal.cli.project_load import load_project
def activate(dirname, ui_mode, conda_environment, command_name):
"""Prepare project and return lines to be sourced.
Future direction: should also activate the proper conda env.
Returns:
None on failure or a list of lines to print.
"""
project = load_project(dirname)
result = prepare_with_ui_mode_printing_errors(project,
ui_mode=ui_mode,
env_spec_name=conda_environment,
command_name=command_name)
if result.failed:
return None
exports = []
# sort so we have deterministic output order for tests
sorted_keys = list(result.environ.keys())
sorted_keys.sort()
for key in sorted_keys:
value = result.environ[key]
if key not in os.environ or os.environ[key] != value:
exports.append("export {key}={value}".format(key=key, value=quote(value)))
return exports
def main(args):
"""Start the activate command and return exit status code."""
result = activate(args.directory, args.mode, args.env_spec, args.command)
if result is None:
return 1
else:
for line in result:
print(line)
return 0
|
bert-quantization/bert-tf-quantization/run_pretraining.py
|
dujiangsu/FasterTransformer
| 777 |
137267
|
<filename>bert-quantization/bert-tf-quantization/run_pretraining.py<gh_stars>100-1000
# coding=utf-8
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run masked LM/next sentence masked_lm pre-training for BERT."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import modeling
import optimization
import tensorflow as tf
import glob
from utils.utils import LogEvalRunHook
from tensorflow.core.protobuf import rewriter_config_pb2
flags = tf.flags
FLAGS = flags.FLAGS
## Required parameters
flags.DEFINE_string(
"bert_config_file", None,
"The config json file corresponding to the pre-trained BERT model. "
"This specifies the model architecture.")
flags.DEFINE_string(
"input_files_dir", None,
"Directory with input files, comma separated or single directory.")
flags.DEFINE_string(
"eval_files_dir", None,
"Directory with eval files, comma separated or single directory. ")
flags.DEFINE_string(
"output_dir", None,
"The output directory where the model checkpoints will be written.")
## Other parameters
flags.DEFINE_string(
"init_checkpoint", None,
"Initial checkpoint (usually from a pre-trained BERT model).")
flags.DEFINE_string(
"optimizer_type", "lamb",
"Optimizer used for training - LAMB or ADAM")
flags.DEFINE_integer(
"max_seq_length", 512,
"The maximum total input sequence length after WordPiece tokenization. "
"Sequences longer than this will be truncated, and sequences shorter "
"than this will be padded. Must match data generation.")
flags.DEFINE_integer(
"max_predictions_per_seq", 80,
"Maximum number of masked LM predictions per sequence. "
"Must match data generation.")
flags.DEFINE_bool("do_train", False, "Whether to run training.")
flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")
flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")
flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")
flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")
flags.DEFINE_integer("num_train_steps", 100000, "Number of training steps.")
flags.DEFINE_integer("num_warmup_steps", 10000, "Number of warmup steps.")
flags.DEFINE_integer("save_checkpoints_steps", 1000,
"How often to save the model checkpoint.")
flags.DEFINE_integer("display_loss_steps", 10,
"How often to print loss")
flags.DEFINE_integer("iterations_per_loop", 1000,
"How many steps to make in each estimator call.")
flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.")
flags.DEFINE_integer("num_accumulation_steps", 1,
"Number of accumulation steps before gradient update."
"Global batch size = num_accumulation_steps * train_batch_size")
flags.DEFINE_bool("allreduce_post_accumulation", False, "Whether to all reduce after accumulation of N steps or after each step")
flags.DEFINE_bool(
"verbose_logging", False,
"If true, all of the trainable parameters are printed")
flags.DEFINE_bool("horovod", False, "Whether to use Horovod for multi-gpu runs")
flags.DEFINE_bool("report_loss", True, "Whether to report total loss during training.")
flags.DEFINE_bool("manual_fp16", False, "Whether to use fp32 or fp16 arithmetic on GPU. "
"Manual casting is done instead of using AMP")
flags.DEFINE_bool("use_xla", False, "Whether to enable XLA JIT compilation.")
flags.DEFINE_bool("use_fp16", False, "Whether to enable AMP ops.")
# report samples/sec, total loss and learning rate during training
class _LogSessionRunHook(tf.estimator.SessionRunHook):
def __init__(self, global_batch_size, num_accumulation_steps, display_every=10, hvd_rank=-1):
self.global_batch_size = global_batch_size
self.display_every = display_every
self.hvd_rank = hvd_rank
self.num_accumulation_steps = num_accumulation_steps
def after_create_session(self, session, coord):
self.elapsed_secs = 0.
self.count = 0
self.all_count = 0
self.avg_loss = 0.0
def before_run(self, run_context):
self.t0 = time.time()
if self.num_accumulation_steps <= 1:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
return tf.estimator.SessionRunArgs(
fetches=['step_update:0', 'total_loss:0',
'learning_rate:0', 'nsp_loss:0',
'mlm_loss:0', 'loss_scale:0'])
else:
return tf.estimator.SessionRunArgs(
fetches=['step_update:0', 'total_loss:0',
'learning_rate:0', 'nsp_loss:0',
'mlm_loss:0'])
else:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
return tf.estimator.SessionRunArgs(
fetches=['step_update:0', 'update_step:0', 'total_loss:0',
'learning_rate:0', 'nsp_loss:0',
'mlm_loss:0', 'loss_scale:0'])
else:
return tf.estimator.SessionRunArgs(
fetches=['step_update:0', 'update_step:0', 'total_loss:0',
'learning_rate:0', 'nsp_loss:0',
'mlm_loss:0'])
def after_run(self, run_context, run_values):
self.elapsed_secs += time.time() - self.t0
if self.num_accumulation_steps <=1:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
global_step, total_loss, lr, nsp_loss, mlm_loss, loss_scaler = run_values.results
else:
global_step, total_loss, lr, nsp_loss, mlm_loss = run_values. \
results
update_step = True
else:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
global_step, update_step, total_loss, lr, nsp_loss, mlm_loss, loss_scaler = run_values.results
else:
global_step, update_step, total_loss, lr, nsp_loss, mlm_loss = run_values.\
results
print_step = global_step + 1 # One-based index for printing.
self.avg_loss += total_loss
self.all_count += 1
if update_step:
self.count += 1
if (print_step == 1 or print_step % self.display_every == 0):
dt = self.elapsed_secs / self.count
sent_per_sec = self.global_batch_size / dt
avg_loss_step = self.avg_loss / self.all_count
if self.hvd_rank >= 0:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
print('Rank = %2d :: Step = %6i Throughput = %11.1f MLM Loss = %10.4e NSP Loss = %10.4e Loss = %6.3f Average Loss = %6.3f LR = %6.4e Loss scale = %6.4e' %
(self.hvd_rank, print_step, sent_per_sec, mlm_loss, nsp_loss, total_loss, avg_loss_step, lr, loss_scaler))
else:
print('Rank = %2d :: Step = %6i Throughput = %11.1f MLM Loss = %10.4e NSP Loss = %10.4e Loss = %6.3f Average Loss = %6.3f LR = %6.4e' %
(self.hvd_rank, print_step, sent_per_sec, mlm_loss, nsp_loss, total_loss, avg_loss_step, lr))
else:
if FLAGS.manual_fp16 or FLAGS.use_fp16:
print('Step = %6i Throughput = %11.1f MLM Loss = %10.4e NSP Loss = %10.4e Loss = %6.3f Average Loss = %6.3f LR = %6.4e Loss scale = %6.4e' %
(print_step, sent_per_sec, mlm_loss, nsp_loss, total_loss, avg_loss_step, lr, loss_scaler))
else:
print('Step = %6i Throughput = %11.1f MLM Loss = %10.4e NSP Loss = %10.4e Loss = %6.3f Average Loss = %6.3f LR = %6.4e' %
(print_step, sent_per_sec, mlm_loss, nsp_loss, total_loss, avg_loss_step, lr))
self.elapsed_secs = 0.
self.count = 0
self.avg_loss = 0.0
self.all_count = 0
def model_fn_builder(bert_config, init_checkpoint, learning_rate,
num_train_steps, num_warmup_steps,
use_one_hot_embeddings, hvd=None):
"""Returns `model_fn` closure for TPUEstimator."""
def model_fn(features, labels, mode, params): # pylint: disable=unused-argument
"""The `model_fn` for TPUEstimator."""
tf.compat.v1.logging.info("*** Features ***")
for name in sorted(features.keys()):
tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape))
input_ids = features["input_ids"]
input_mask = features["input_mask"]
segment_ids = features["segment_ids"]
masked_lm_positions = features["masked_lm_positions"]
masked_lm_ids = features["masked_lm_ids"]
masked_lm_weights = features["masked_lm_weights"]
next_sentence_labels = features["next_sentence_labels"]
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
model = modeling.BertModel(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings,
compute_type=tf.float16 if FLAGS.manual_fp16 else tf.float32)
(masked_lm_loss,
masked_lm_example_loss, masked_lm_log_probs) = get_masked_lm_output(
bert_config, model.get_sequence_output(), model.get_embedding_table(),
masked_lm_positions, masked_lm_ids,
masked_lm_weights)
(next_sentence_loss, next_sentence_example_loss,
next_sentence_log_probs) = get_next_sentence_output(
bert_config, model.get_pooled_output(), next_sentence_labels)
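    # Name the loss tensors so _LogSessionRunHook can fetch them by name
    # (e.g. 'total_loss:0') during training.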
masked_lm_loss = tf.identity(masked_lm_loss, name="mlm_loss")
next_sentence_loss = tf.identity(next_sentence_loss, name="nsp_loss")
total_loss = masked_lm_loss + next_sentence_loss
total_loss = tf.identity(total_loss, name='total_loss')
tvars = tf.trainable_variables()
initialized_variable_names = {}
if init_checkpoint and (hvd is None or hvd.rank() == 0):
print("Loading checkpoint", init_checkpoint)
(assignment_map, initialized_variable_names
) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
if FLAGS.verbose_logging:
tf.compat.v1.logging.info("**** Trainable Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
tf.compat.v1.logging.info(" %d :: name = %s, shape = %s%s", 0 if hvd is None else hvd.rank(), var.name, var.shape,
init_string)
output_spec = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = optimization.create_optimizer(
total_loss, learning_rate, num_train_steps, num_warmup_steps,
hvd, FLAGS.manual_fp16, FLAGS.use_fp16, FLAGS.num_accumulation_steps, FLAGS.optimizer_type, FLAGS.allreduce_post_accumulation)
output_spec = tf.estimator.EstimatorSpec(
mode=mode,
loss=total_loss,
train_op=train_op)
elif mode == tf.estimator.ModeKeys.EVAL:
def metric_fn(masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
masked_lm_weights, next_sentence_example_loss,
next_sentence_log_probs, next_sentence_labels):
"""Computes the loss and accuracy of the model."""
masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
[-1, masked_lm_log_probs.shape[-1]])
masked_lm_predictions = tf.argmax(
masked_lm_log_probs, axis=-1, output_type=tf.int32)
masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
masked_lm_ids = tf.reshape(masked_lm_ids, [-1])
masked_lm_weights = tf.reshape(masked_lm_weights, [-1])
masked_lm_accuracy = tf.metrics.accuracy(
labels=masked_lm_ids,
predictions=masked_lm_predictions,
weights=masked_lm_weights)
masked_lm_mean_loss = tf.metrics.mean(
values=masked_lm_example_loss, weights=masked_lm_weights)
next_sentence_log_probs = tf.reshape(
next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
next_sentence_predictions = tf.argmax(
next_sentence_log_probs, axis=-1, output_type=tf.int32)
next_sentence_labels = tf.reshape(next_sentence_labels, [-1])
next_sentence_accuracy = tf.metrics.accuracy(
labels=next_sentence_labels, predictions=next_sentence_predictions)
next_sentence_mean_loss = tf.metrics.mean(
values=next_sentence_example_loss)
return {
"masked_lm_accuracy": masked_lm_accuracy,
"masked_lm_loss": masked_lm_mean_loss,
"next_sentence_accuracy": next_sentence_accuracy,
"next_sentence_loss": next_sentence_mean_loss,
}
eval_metric_ops = metric_fn(
masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
masked_lm_weights, next_sentence_example_loss,
next_sentence_log_probs, next_sentence_labels
)
output_spec = tf.estimator.EstimatorSpec(
mode=mode,
loss=total_loss,
eval_metric_ops=eval_metric_ops)
else:
raise ValueError("Only TRAIN and EVAL modes are supported: %s" % (mode))
return output_spec
return model_fn
def get_masked_lm_output(bert_config, input_tensor, output_weights, positions,
label_ids, label_weights):
"""Get loss and log probs for the masked LM."""
input_tensor = gather_indexes(input_tensor, positions)
with tf.variable_scope("cls/predictions"):
# We apply one more non-linear transformation before the output layer.
# This matrix is not used after pre-training.
with tf.variable_scope("transform"):
input_tensor = tf.layers.dense(
input_tensor,
units=bert_config.hidden_size,
activation=modeling.get_activation(bert_config.hidden_act),
kernel_initializer=modeling.create_initializer(
bert_config.initializer_range))
input_tensor = modeling.layer_norm(input_tensor)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
output_bias = tf.get_variable(
"output_bias",
shape=[bert_config.vocab_size],
initializer=tf.zeros_initializer())
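    # Cast activations back to fp32 so the vocabulary projection and
    # log-softmax are computed in full precision even under fp16 training.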
logits = tf.matmul(tf.cast(input_tensor, tf.float32), output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
log_probs = tf.nn.log_softmax(logits, axis=-1)
label_ids = tf.reshape(label_ids, [-1])
label_weights = tf.reshape(label_weights, [-1])
one_hot_labels = tf.one_hot(
label_ids, depth=bert_config.vocab_size, dtype=tf.float32)
# The `positions` tensor might be zero-padded (if the sequence is too
# short to have the maximum number of predictions). The `label_weights`
# tensor has a value of 1.0 for every real prediction and 0.0 for the
# padding predictions.
per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
numerator = tf.reduce_sum(label_weights * per_example_loss)
denominator = tf.reduce_sum(label_weights) + 1e-5
loss = numerator / denominator
return (loss, per_example_loss, log_probs)
def get_next_sentence_output(bert_config, input_tensor, labels):
"""Get loss and log probs for the next sentence prediction."""
# Simple binary classification. Note that 0 is "next sentence" and 1 is
# "random sentence". This weight matrix is not used after pre-training.
with tf.variable_scope("cls/seq_relationship"):
output_weights = tf.get_variable(
"output_weights",
shape=[2, bert_config.hidden_size],
initializer=modeling.create_initializer(bert_config.initializer_range))
output_bias = tf.get_variable(
"output_bias", shape=[2], initializer=tf.zeros_initializer())
logits = tf.matmul(tf.cast(input_tensor, tf.float32), output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
log_probs = tf.nn.log_softmax(logits, axis=-1)
labels = tf.reshape(labels, [-1])
one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32)
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, per_example_loss, log_probs)
def gather_indexes(sequence_tensor, positions):
"""Gathers the vectors at the specific positions over a minibatch."""
sequence_shape = modeling.get_shape_list(sequence_tensor, expected_rank=3)
batch_size = sequence_shape[0]
seq_length = sequence_shape[1]
width = sequence_shape[2]
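  # Add a per-example offset so that positions index into the tensor
  # flattened to [batch_size * seq_length, width].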
flat_offsets = tf.reshape(
tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1])
flat_positions = tf.reshape(positions + flat_offsets, [-1])
flat_sequence_tensor = tf.reshape(sequence_tensor,
[batch_size * seq_length, width])
output_tensor = tf.gather(flat_sequence_tensor, flat_positions)
return output_tensor
def input_fn_builder(input_files,
batch_size,
max_seq_length,
max_predictions_per_seq,
is_training,
num_cpu_threads=4,
hvd=None):
"""Creates an `input_fn` closure to be passed to Estimator."""
def input_fn():
"""The actual input function."""
name_to_features = {
"input_ids":
tf.io.FixedLenFeature([max_seq_length], tf.int64),
"input_mask":
tf.io.FixedLenFeature([max_seq_length], tf.int64),
"segment_ids":
tf.io.FixedLenFeature([max_seq_length], tf.int64),
"masked_lm_positions":
tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
"masked_lm_ids":
tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
"masked_lm_weights":
tf.io.FixedLenFeature([max_predictions_per_seq], tf.float32),
"next_sentence_labels":
tf.io.FixedLenFeature([1], tf.int64),
}
# For training, we want a lot of parallel reading and shuffling.
# For eval, we want no shuffling and parallel reading doesn't matter.
if is_training:
d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
if hvd is not None: d = d.shard(hvd.size(), hvd.rank())
d = d.repeat()
d = d.shuffle(buffer_size=len(input_files))
# `cycle_length` is the number of parallel files that get read.
cycle_length = min(num_cpu_threads, len(input_files))
# `sloppy` mode means that the interleaving is not exact. This adds
# even more randomness to the training pipeline.
d = d.apply(
tf.contrib.data.parallel_interleave(
tf.data.TFRecordDataset,
sloppy=is_training,
cycle_length=cycle_length))
d = d.shuffle(buffer_size=100)
else:
d = tf.data.TFRecordDataset(input_files)
# Since we evaluate for a fixed number of steps we don't want to encounter
# out-of-range exceptions.
d = d.repeat()
# We must `drop_remainder` on training because the TPU requires fixed
# size dimensions. For eval, we assume we are evaluating on the CPU or GPU
      # and we *don't* want to drop the remainder, otherwise we won't cover
# every sample.
d = d.apply(
tf.contrib.data.map_and_batch(
lambda record: _decode_record(record, name_to_features),
batch_size=batch_size,
num_parallel_batches=num_cpu_threads,
drop_remainder=True if is_training else False))
return d
return input_fn
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(example.keys()):
t = example[name]
if t.dtype == tf.int64:
t = tf.to_int32(t)
example[name] = t
return example
def main(_):
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
if not FLAGS.do_train and not FLAGS.do_eval:
raise ValueError("At least one of `do_train` or `do_eval` must be True.")
if FLAGS.use_fp16:
os.environ["TF_ENABLE_AUTO_MIXED_PRECISION_GRAPH_REWRITE"] = "1"
if FLAGS.horovod:
import horovod.tensorflow as hvd
hvd.init()
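  # With Horovod, each process drives one GPU; rank 0 handles checkpointing
  # and logging (see the RunConfig and hooks below).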
bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
tf.io.gfile.makedirs(FLAGS.output_dir)
input_files = []
for input_file_dir in FLAGS.input_files_dir.split(","):
input_files.extend(tf.io.gfile.glob(os.path.join(input_file_dir, "*")))
if FLAGS.horovod and len(input_files) < hvd.size():
raise ValueError("Input Files must be sharded")
if FLAGS.use_fp16 and FLAGS.manual_fp16:
raise ValueError("AMP and Manual Mixed Precision Training are both activated! Error")
is_per_host = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
config = tf.compat.v1.ConfigProto()
if FLAGS.horovod:
config.gpu_options.visible_device_list = str(hvd.local_rank())
if hvd.rank() == 0:
tf.compat.v1.logging.info("***** Configuaration *****")
for key in FLAGS.__flags.keys():
tf.compat.v1.logging.info(' {}: {}'.format(key, getattr(FLAGS, key)))
tf.compat.v1.logging.info("**************************")
# config.gpu_options.per_process_gpu_memory_fraction = 0.7
if FLAGS.use_xla:
config.graph_options.optimizer_options.global_jit_level = tf.compat.v1.OptimizerOptions.ON_1
config.graph_options.rewrite_options.memory_optimization = rewriter_config_pb2.RewriterConfig.NO_MEM_OPT
run_config = tf.estimator.RunConfig(
model_dir=FLAGS.output_dir,
session_config=config,
save_checkpoints_steps=FLAGS.save_checkpoints_steps if not FLAGS.horovod or hvd.rank() == 0 else None,
# This variable controls how often estimator reports examples/sec.
# Default value is every 100 steps.
# When --report_loss is True, we set to very large value to prevent
# default info reporting from estimator.
# Ideally we should set it to None, but that does not work.
log_step_count_steps=10000 if FLAGS.report_loss else 100)
model_fn = model_fn_builder(
bert_config=bert_config,
init_checkpoint=FLAGS.init_checkpoint,
learning_rate=FLAGS.learning_rate if not FLAGS.horovod else FLAGS.learning_rate*hvd.size(),
num_train_steps=FLAGS.num_train_steps,
num_warmup_steps=FLAGS.num_warmup_steps,
use_one_hot_embeddings=False,
hvd=None if not FLAGS.horovod else hvd)
training_hooks = []
if FLAGS.report_loss and (not FLAGS.horovod or hvd.rank() == 0):
global_batch_size = FLAGS.train_batch_size * FLAGS.num_accumulation_steps if not FLAGS.horovod else FLAGS.train_batch_size * FLAGS.num_accumulation_steps * hvd.size()
training_hooks.append(_LogSessionRunHook(global_batch_size, FLAGS.num_accumulation_steps, FLAGS.display_loss_steps))
if FLAGS.horovod and hvd.size() > 1:
training_hooks.append(hvd.BroadcastGlobalVariablesHook(0))
estimator = tf.estimator.Estimator(
model_fn=model_fn,
config=run_config)
if FLAGS.do_train:
tf.compat.v1.logging.info("***** Running training *****")
tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size)
train_input_fn = input_fn_builder(
input_files=input_files,
batch_size=FLAGS.train_batch_size,
max_seq_length=FLAGS.max_seq_length,
max_predictions_per_seq=FLAGS.max_predictions_per_seq,
is_training=True,
hvd=None if not FLAGS.horovod else hvd)
estimator.train(input_fn=train_input_fn, hooks=training_hooks, max_steps=FLAGS.num_train_steps)
if FLAGS.do_eval and (not FLAGS.horovod or hvd.rank() == 0):
tf.compat.v1.logging.info("***** Running evaluation *****")
tf.compat.v1.logging.info(" Batch size = %d", FLAGS.eval_batch_size)
eval_files = []
for eval_file_dir in FLAGS.eval_files_dir.split(","):
eval_files.extend(tf.io.gfile.glob(os.path.join(eval_file_dir, "*")))
eval_input_fn = input_fn_builder(
input_files=eval_files,
batch_size=FLAGS.eval_batch_size,
max_seq_length=FLAGS.max_seq_length,
max_predictions_per_seq=FLAGS.max_predictions_per_seq,
is_training=False,
hvd=None if not FLAGS.horovod else hvd)
eval_hooks = [LogEvalRunHook(FLAGS.eval_batch_size)]
eval_start_time = time.time()
result = estimator.evaluate(
input_fn=eval_input_fn, steps=FLAGS.max_eval_steps, hooks=eval_hooks)
eval_time_elapsed = time.time() - eval_start_time
eval_time_wo_overhead = eval_hooks[-1].total_time
num_sentences = (eval_hooks[-1].count - eval_hooks[-1].skipped) * FLAGS.eval_batch_size
ss_sentences_per_second = num_sentences * 1.0 / eval_time_wo_overhead
tf.compat.v1.logging.info("-----------------------------")
tf.compat.v1.logging.info("Total Inference Time = %0.2f for Sentences = %d", eval_time_elapsed,
eval_hooks[-1].count * FLAGS.eval_batch_size)
tf.compat.v1.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", eval_time_wo_overhead,
(eval_hooks[-1].count - eval_hooks[-1].skipped) * FLAGS.eval_batch_size)
tf.compat.v1.logging.info("Summary Inference Statistics on EVAL set")
tf.compat.v1.logging.info("Batch size = %d", FLAGS.eval_batch_size)
tf.compat.v1.logging.info("Sequence Length = %d", FLAGS.max_seq_length)
tf.compat.v1.logging.info("Precision = %s", "fp16" if FLAGS.use_fp16 else "fp32")
tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second)
tf.compat.v1.logging.info("-----------------------------")
output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt")
with tf.io.gfile.GFile(output_eval_file, "w") as writer:
tf.compat.v1.logging.info("***** Eval results *****")
for key in sorted(result.keys()):
tf.compat.v1.logging.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
if __name__ == "__main__":
flags.mark_flag_as_required("input_files_dir")
if FLAGS.do_eval:
flags.mark_flag_as_required("eval_files_dir")
flags.mark_flag_as_required("bert_config_file")
flags.mark_flag_as_required("output_dir")
if FLAGS.use_xla and FLAGS.manual_fp16:
print('WARNING! Combining --use_xla with --manual_fp16 may prevent convergence.')
print(' This warning message will be removed when the underlying')
print(' issues have been fixed and you are running a TF version')
print(' that has that fix.')
tf.compat.v1.app.run()
|
env/Lib/site-packages/plotly/validators/layout/uniformtext/_mode.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
| 11,750 |
137282
|
<reponame>andresgreen-byte/Laboratorio-1--Inversion-de-Capital
import _plotly_utils.basevalidators
class ModeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="mode", parent_name="layout.uniformtext", **kwargs):
super(ModeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
values=kwargs.pop("values", [False, "hide", "show"]),
**kwargs
)
|
ikalog/inputs/filters/warp.py
|
fetus-hina/IkaLog
| 285 |
137338
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 <NAME>
# Copyright (C) 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# This source includes modified version of sample codes in OpenCV
# distribution, licensed under 3-clause BSD License.
#
# By downloading, copying, installing or using the software you agree to this license.
# If you do not agree to this license, do not download, install,
# copy or use the software.
#
#
# License Agreement
# For Open Source Computer Vision Library
# (3-clause BSD License)
#
# Copyright (C) 2000-2015, Intel Corporation, all rights reserved.
# Copyright (C) 2009-2011, <NAME> Inc., all rights reserved.
# Copyright (C) 2009-2015, NVIDIA Corporation, all rights reserved.
# Copyright (C) 2010-2013, Advanced Micro Devices, Inc., all rights reserved.
# Copyright (C) 2015, OpenCV Foundation, all rights reserved.
# Copyright (C) 2015, Itseez Inc., all rights reserved.
# Third party copyrights are property of their respective owners.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the names of the copyright holders nor the names of the contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# This software is provided by the copyright holders and contributors "as is" and
# any express or implied warranties, including, but not limited to, the implied
# warranties of merchantability and fitness for a particular purpose are disclaimed.
# In no event shall copyright holders or contributors be liable for any direct,
# indirect, incidental, special, exemplary, or consequential damages
# (including, but not limited to, procurement of substitute goods or services;
# loss of use, data, or profits; or business interruption) however caused
# and on any theory of liability, whether in contract, strict liability,
# or tort (including negligence or otherwise) arising in any way out of
# the use of this software, even if advised of the possibility of such damage.
#
import os
import pickle
import cv2
import numpy as np
from ikalog.inputs.filters import Filter, WarpFilterModel
from ikalog.utils import *
class WarpCalibrationException(Exception):
pass
class WarpCalibrationNotFound(WarpCalibrationException):
pass
class WarpCalibrationUnacceptableSize(WarpCalibrationException):
def __init__(self, shape):
self.shape = shape
class WarpFilter(Filter):
def filter_matches(self, kp1, kp2, matches, ratio=0.75):
mkp1, mkp2 = [], []
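        # Lowe's ratio test: keep a match only if the best candidate is
        # clearly better than the second best.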
for m in matches:
if len(m) == 2 and m[0].distance < m[1].distance * ratio:
m = m[0]
mkp1.append(kp1[m.queryIdx])
mkp2.append(kp2[m.trainIdx])
p1 = np.float32([kp.pt for kp in mkp1])
p2 = np.float32([kp.pt for kp in mkp2])
kp_pairs = zip(mkp1, mkp2)
return p1, p2, kp_pairs
def set_bbox(self, x, y, w, h):
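        # Use the given bounding box as the source quadrilateral and map it
        # onto the fixed output quadrilateral (self.pts2).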
corners = np.float32(
[[x, y], [x + w, y], [w + x, y + h], [x, y + h]]
)
self.pts1 = np.float32(corners)
IkaUtils.dprint('pts1: %s' % [self.pts1])
IkaUtils.dprint('pts2: %s' % [self.pts2])
self.M = cv2.getPerspectiveTransform(self.pts1, self.pts2)
return True
def calibrateWarp(self, capture_image, validation_func=None):
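        # Match features between the calibration image and the captured frame,
        # then estimate a homography to derive the perspective correction.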
capture_image_gray = cv2.cvtColor(capture_image, cv2.COLOR_BGR2GRAY)
capture_image_keypoints, capture_image_descriptors = self.detector.detectAndCompute(
capture_image_gray, None)
        print('capture_image - %d features' % (len(capture_image_keypoints)))
print('matching...')
raw_matches = self.matcher.knnMatch(
self.calibration_image_descriptors,
trainDescriptors=capture_image_descriptors,
k=2
)
p1, p2, kp_pairs = self.filter_matches(
self.calibration_image_keypoints,
capture_image_keypoints,
raw_matches,
)
if len(p1) >= 4:
H, status = cv2.findHomography(p1, p2, cv2.RANSAC, 5.0)
print('%d / %d inliers/matched' % (np.sum(status), len(status)))
else:
H, status = None, None
print('%d matches found, not enough for homography estimation' % len(p1))
self.calibration_requested = False
raise WarpCalibrationNotFound()
if H is None:
            # Should never reach here...
self.calibration_requested = False
raise WarpCalibrationNotFound()
if len(status) < 1000:
raise WarpCalibrationNotFound()
calibration_image_height, calibration_image_width = self.calibration_image_size
corners = np.float32(
[[0, 0],
[calibration_image_width, 0],
[calibration_image_width, calibration_image_height],
[0, calibration_image_height]]
)
pts1 = np.float32(cv2.perspectiveTransform(
corners.reshape(1, -1, 2), H).reshape(-1, 2) + (0, 0))
IkaUtils.dprint('pts1: %s' % [pts1])
IkaUtils.dprint('pts2: %s' % [self.pts2])
if validation_func is not None:
if not validation_func(pts1):
w = int(pts1[1][0] - pts1[0][0])
h = int(pts1[2][1] - pts1[1][1])
raise WarpCalibrationUnacceptableSize((w, h))
self.M = cv2.getPerspectiveTransform(pts1, self.pts2)
return True
def tuples2keyPoints(self, tuples):
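        # cv2.KeyPoint objects cannot be pickled, so the calibration model is
        # stored as plain tuples and rebuilt into KeyPoints here.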
new_l = []
for point in tuples:
pt, size, angle, response, octave, class_id = point
new_l.append(cv2.KeyPoint(
pt[0], pt[1], size, angle, response, octave, class_id))
return new_l
def keyPoints2tuples(self, points):
new_l = []
for point in points:
new_l.append((point.pt, point.size, point.angle, point.response, point.octave,
point.class_id))
return new_l
def loadModelFromFile(self, file):
f = open(file, 'rb')
l = pickle.load(f)
f.close()
self.calibration_image_size = l[0]
self.calibration_image_keypoints = self.tuples2keyPoints(l[1])
self.calibration_image_descriptors = l[2]
def saveModelToFile(self, file):
f = open(file, 'wb')
pickle.dump([
self.calibration_image_size,
self.keyPoints2tuples(self.calibration_image_keypoints),
self.calibration_image_descriptors,
], f)
f.close()
def initializeCalibration(self):
model_object = WarpFilterModel()
if not model_object.trained:
            raise Exception('Could not initialize WarpFilterModel')
self.detector = model_object.detector
self.norm = model_object.norm
self.matcher = model_object.matcher
self.calibration_image_size = model_object.calibration_image_size
self.calibration_image_keypoints = model_object.calibration_image_keypoints
self.calibration_image_descriptors = model_object.calibration_image_descriptors
self.reset()
def reset(self):
# input source
w = 1280
h = 720
self.pts2 = np.float32([[0, 0], [w, 0], [w, h], [0, h]])
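        # Identity transform (pts2 -> pts2) until calibration computes a real
        # homography.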
self.M = cv2.getPerspectiveTransform(self.pts2, self.pts2)
def pre_execute(self, frame):
return True
def execute(self, frame):
if not (self.enabled and self.pre_execute(frame)):
return frame
return cv2.warpPerspective(frame, self.M, (1280, 720))
def __init__(self, parent, debug=False):
super().__init__(parent, debug=debug)
self.initializeCalibration()
|
tests/models/speech_encoder_decoder/test_modeling_flax_speech_encoder_decoder.py
|
bugface/transformers
| 8,028 |
137339
|
# coding=utf-8
# Copyright 2022 HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import unittest
import numpy as np
from transformers import is_flax_available, is_torch_available
from transformers.testing_utils import is_pt_flax_cross_test, require_flax, slow, torch_device
from ...test_modeling_flax_common import floats_tensor, ids_tensor, random_attention_mask
from ..bart.test_modeling_flax_bart import FlaxBartStandaloneDecoderModelTester
from ..bert.test_modeling_flax_bert import FlaxBertModelTester
from ..gpt2.test_modeling_flax_gpt2 import FlaxGPT2ModelTester
from ..wav2vec2.test_modeling_flax_wav2vec2 import FlaxWav2Vec2ModelTester
if is_flax_available():
import jax
import jax.numpy as jnp
from flax.training.common_utils import onehot
from flax.traverse_util import flatten_dict
from transformers import (
FlaxBartForCausalLM,
FlaxBertForCausalLM,
FlaxGPT2LMHeadModel,
FlaxSpeechEncoderDecoderModel,
FlaxWav2Vec2Model,
SpeechEncoderDecoderConfig,
)
from transformers.modeling_flax_outputs import FlaxBaseModelOutput
from transformers.modeling_flax_pytorch_utils import (
convert_pytorch_state_dict_to_flax,
load_flax_weights_in_pytorch_model,
)
if is_torch_available():
import torch
from transformers import SpeechEncoderDecoderModel
@require_flax
class FlaxEncoderDecoderMixin:
def get_encoder_decoder_model(self, config, decoder_config):
raise NotImplementedError
def prepare_config_and_inputs(self):
raise NotImplementedError
def get_pretrained_model(self):
raise NotImplementedError
def check_encoder_decoder_model_from_pretrained_configs(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
encoder_decoder_config = SpeechEncoderDecoderConfig.from_encoder_decoder_configs(config, decoder_config)
self.assertTrue(encoder_decoder_config.decoder.is_decoder)
enc_dec_model = FlaxSpeechEncoderDecoderModel(encoder_decoder_config)
self.assertTrue(enc_dec_model.config.is_encoder_decoder)
self.assertFalse(enc_dec_model.config.tie_word_embeddings)
outputs_encoder_decoder = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
)
self.assertEqual(
outputs_encoder_decoder["logits"].shape, (decoder_input_ids.shape + (decoder_config.vocab_size,))
)
def check_encoder_decoder_model(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
enc_dec_model = SpeechEncoderDecoderModel(encoder=encoder_model, decoder=decoder_model)
self.assertTrue(enc_dec_model.config.decoder.is_decoder)
self.assertTrue(enc_dec_model.config.decoder.add_cross_attention)
self.assertTrue(enc_dec_model.config.is_encoder_decoder)
outputs_encoder_decoder = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
)
self.assertEqual(
outputs_encoder_decoder["logits"].shape, (decoder_input_ids.shape + (decoder_config.vocab_size,))
)
encoder_outputs = FlaxBaseModelOutput(last_hidden_state=outputs_encoder_decoder.encoder_hidden_states[-1])
outputs_encoder_decoder = enc_dec_model(
attention_mask, decoder_input_ids, decoder_attention_mask, encoder_outputs=encoder_outputs
)
self.assertEqual(
outputs_encoder_decoder["logits"].shape, (decoder_input_ids.shape + (decoder_config.vocab_size,))
)
def check_encoder_decoder_model_from_pretrained(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
return_dict,
**kwargs
):
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
kwargs = {"encoder_model": encoder_model, "decoder_model": decoder_model, "return_dict": return_dict}
enc_dec_model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(**kwargs)
outputs_encoder_decoder = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
output_hidden_states=True,
return_dict=True,
)
self.assertEqual(
outputs_encoder_decoder["logits"].shape, (decoder_input_ids.shape + (decoder_config.vocab_size,))
)
def check_save_and_load(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
kwargs = {"encoder_model": encoder_model, "decoder_model": decoder_model}
enc_dec_model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(**kwargs)
outputs = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
)
out_2 = np.array(outputs[0])
out_2[np.isnan(out_2)] = 0
with tempfile.TemporaryDirectory() as tmpdirname:
enc_dec_model.save_pretrained(tmpdirname)
            enc_dec_model = FlaxSpeechEncoderDecoderModel.from_pretrained(tmpdirname)
after_outputs = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
)
out_1 = np.array(after_outputs[0])
out_1[np.isnan(out_1)] = 0
max_diff = np.amax(np.abs(out_1 - out_2))
self.assertLessEqual(max_diff, 4e-2)
def check_encoder_decoder_model_from_encoder_decoder_pretrained(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
# assert that loading encoder and decoder models from configs has been correctly executed
self.assertEqual(config.add_adapter, encoder_model.config.add_adapter)
self.assertEqual(decoder_config.use_cache, decoder_model.config.use_cache)
with tempfile.TemporaryDirectory() as enc_tmpdir:
with tempfile.TemporaryDirectory() as dec_tmpdir:
encoder_model.save_pretrained(enc_tmpdir)
decoder_model.save_pretrained(dec_tmpdir)
# load a model from pretrained encoder and decoder checkpoints, setting one encoder and one decoder kwarg opposite to that specified in their respective configs
enc_dec_model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(
encoder_pretrained_model_name_or_path=enc_tmpdir,
decoder_pretrained_model_name_or_path=dec_tmpdir,
encoder_add_adapter=not config.add_adapter,
decoder_use_cache=not decoder_config.use_cache,
)
# assert that setting encoder and decoder kwargs opposite to those in the configs has correctly been applied
self.assertNotEqual(config.add_adapter, enc_dec_model.config.encoder.add_adapter)
self.assertNotEqual(decoder_config.use_cache, enc_dec_model.config.decoder.use_cache)
outputs_encoder_decoder = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
output_hidden_states=True,
return_dict=True,
)
self.assertEqual(
outputs_encoder_decoder["logits"].shape, (decoder_input_ids.shape + (decoder_config.vocab_size,))
)
def check_encoder_decoder_model_output_attentions(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
# make the decoder inputs a different shape from the encoder inputs to harden the test
decoder_input_ids = decoder_input_ids[:, :-1]
decoder_attention_mask = decoder_attention_mask[:, :-1]
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
kwargs = {"encoder_model": encoder_model, "decoder_model": decoder_model}
enc_dec_model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(**kwargs)
outputs_encoder_decoder = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
output_attentions=True,
)
encoder_attentions = outputs_encoder_decoder["encoder_attentions"]
self.assertEqual(len(encoder_attentions), config.num_hidden_layers)
seq_len = enc_dec_model._get_feat_extract_output_lengths(inputs.shape[1])
self.assertEqual(encoder_attentions[0].shape[-3:], (config.num_attention_heads, seq_len, seq_len))
decoder_attentions = outputs_encoder_decoder["decoder_attentions"]
num_decoder_layers = (
decoder_config.num_decoder_layers
if hasattr(decoder_config, "num_decoder_layers")
else decoder_config.num_hidden_layers
)
self.assertEqual(len(decoder_attentions), num_decoder_layers)
self.assertEqual(
decoder_attentions[0].shape[-3:],
(decoder_config.num_attention_heads, decoder_input_ids.shape[-1], decoder_input_ids.shape[-1]),
)
cross_attentions = outputs_encoder_decoder["cross_attentions"]
self.assertEqual(len(cross_attentions), num_decoder_layers)
cross_attention_input_seq_len = decoder_input_ids.shape[-1]
self.assertEqual(
cross_attentions[0].shape[-3:],
(decoder_config.num_attention_heads, cross_attention_input_seq_len, seq_len),
)
def check_encoder_decoder_model_generate(self, inputs, config, decoder_config, **kwargs):
encoder_model, decoder_model = self.get_encoder_decoder_model(config, decoder_config)
kwargs = {"encoder_model": encoder_model, "decoder_model": decoder_model}
enc_dec_model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(**kwargs)
pad_token_id = enc_dec_model.config.decoder.pad_token_id
eos_token_id = enc_dec_model.config.decoder.eos_token_id
decoder_start_token_id = enc_dec_model.config.decoder.decoder_start_token_id
# Copied from generation_utils (GPT2 doesn't have `pad_token_id`)
if pad_token_id is None and eos_token_id is not None:
pad_token_id = eos_token_id
if decoder_start_token_id is None:
decoder_start_token_id = enc_dec_model.config.decoder.bos_token_id
# Bert does not have a bos token id, so use pad_token_id instead
# Copied from `test_modeling_encoder_decoder.py`
if decoder_start_token_id is None:
decoder_start_token_id = pad_token_id
generated_output = enc_dec_model.generate(
inputs,
pad_token_id=pad_token_id,
eos_token_id=eos_token_id,
decoder_start_token_id=decoder_start_token_id,
)
generated_sequences = generated_output.sequences
self.assertEqual(generated_sequences.shape, (inputs.shape[0],) + (decoder_config.max_length,))
def check_freeze_feature_encoder(
self,
config,
inputs,
attention_mask,
encoder_hidden_states,
decoder_config,
decoder_input_ids,
decoder_attention_mask,
**kwargs
):
encoder_decoder_config = SpeechEncoderDecoderConfig.from_encoder_decoder_configs(config, decoder_config)
enc_dec_model = FlaxSpeechEncoderDecoderModel(encoder_decoder_config)
params = enc_dec_model.params
def cross_entropy(logits, labels):
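            # Categorical cross-entropy over the vocabulary dimension,
            # computed from one-hot labels.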
return -jnp.sum(labels * jax.nn.log_softmax(logits, axis=-1), axis=-1)
# define a dummy loss function for computing the loss over a forward pass
def compute_loss(
params,
inputs,
attention_mask,
decoder_input_ids,
freeze_feature_encoder: bool = False,
):
outputs_enc_dec = enc_dec_model(
inputs=inputs,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
freeze_feature_encoder=freeze_feature_encoder,
params=params,
)
logits = outputs_enc_dec.logits
vocab_size = logits.shape[-1]
loss = cross_entropy(logits, onehot(labels=decoder_input_ids, num_classes=vocab_size)).sum()
return (loss, logits)
# transform the loss function to get the gradients
grad_fn = jax.value_and_grad(compute_loss, has_aux=True)
# compute the loss, logits, and gradients for the unfrozen model
(loss, logits), grads = grad_fn(
params, inputs, attention_mask, decoder_input_ids, freeze_feature_encoder=False
)
# compare to the loss, logits and gradients for the frozen model
(loss_frozen, logits_frozen), grads_frozen = grad_fn(
params, inputs, attention_mask, decoder_input_ids, freeze_feature_encoder=True
)
# ensure that the logits and losses remain precisely equal
self.assertTrue((logits == logits_frozen).all())
self.assertEqual(loss, loss_frozen)
grads = flatten_dict(grads)
grads_frozen = flatten_dict(grads_frozen)
# ensure that the dicts of gradients contain the same keys
self.assertEqual(grads.keys(), grads_frozen.keys())
# ensure that the gradients of the feature extractor layers are precisely zero when frozen and contain non-zero entries when unfrozen
feature_extractor_grads = tuple(grads[k] for k in grads if "feature_extractor" in k)
feature_extractor_grads_frozen = tuple(grads_frozen[k] for k in grads_frozen if "feature_extractor" in k)
for feature_extractor_grad, feature_extractor_grad_frozen in zip(
feature_extractor_grads, feature_extractor_grads_frozen
):
self.assertTrue((feature_extractor_grad_frozen == 0.0).all())
self.assertTrue((feature_extractor_grad > 0.0).any())
# ensure that the gradients of all unfrozen layers remain precisely equal, i.e. all layers excluding the frozen 'feature_extractor'
grads = tuple(grads[k] for k in grads if "feature_extractor" not in k)
grads_frozen = tuple(grads_frozen[k] for k in grads_frozen if "feature_extractor" not in k)
for grad, grad_frozen in zip(grads, grads_frozen):
self.assertTrue((grad == grad_frozen).all())
def check_pt_flax_equivalence(self, pt_model, fx_model, inputs_dict):
pt_model.to(torch_device)
pt_model.eval()
# prepare inputs
flax_inputs = inputs_dict
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in flax_inputs.items()}
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs).to_tuple()
fx_outputs = fx_model(**inputs_dict).to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
for fx_output, pt_output in zip(fx_outputs, pt_outputs):
self.assert_almost_equals(fx_output, pt_output.numpy(), 1e-5)
# PT -> Flax
with tempfile.TemporaryDirectory() as tmpdirname:
pt_model.save_pretrained(tmpdirname)
fx_model_loaded = FlaxSpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_pt=True)
fx_outputs_loaded = fx_model_loaded(**inputs_dict).to_tuple()
self.assertEqual(len(fx_outputs_loaded), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
for fx_output_loaded, pt_output in zip(fx_outputs_loaded, pt_outputs):
self.assert_almost_equals(fx_output_loaded, pt_output.numpy(), 1e-5)
# Flax -> PT
with tempfile.TemporaryDirectory() as tmpdirname:
fx_model.save_pretrained(tmpdirname)
pt_model_loaded = SpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_flax=True)
pt_model_loaded.to(torch_device)
pt_model_loaded.eval()
with torch.no_grad():
pt_outputs_loaded = pt_model_loaded(**pt_inputs).to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs_loaded), "Output lengths differ between Flax and PyTorch")
for fx_output, pt_output_loaded in zip(fx_outputs, pt_outputs_loaded):
self.assert_almost_equals(fx_output, pt_output_loaded.numpy(), 1e-5)
def check_equivalence_pt_to_flax(self, config, decoder_config, inputs_dict):
encoder_decoder_config = SpeechEncoderDecoderConfig.from_encoder_decoder_configs(config, decoder_config)
pt_model = SpeechEncoderDecoderModel(encoder_decoder_config)
fx_model = FlaxSpeechEncoderDecoderModel(encoder_decoder_config)
fx_state = convert_pytorch_state_dict_to_flax(pt_model.state_dict(), fx_model)
fx_model.params = fx_state
self.check_pt_flax_equivalence(pt_model, fx_model, inputs_dict)
def check_equivalence_flax_to_pt(self, config, decoder_config, inputs_dict):
encoder_decoder_config = SpeechEncoderDecoderConfig.from_encoder_decoder_configs(config, decoder_config)
pt_model = SpeechEncoderDecoderModel(encoder_decoder_config)
fx_model = FlaxSpeechEncoderDecoderModel(encoder_decoder_config)
pt_model = load_flax_weights_in_pytorch_model(pt_model, fx_model.params)
self.check_pt_flax_equivalence(pt_model, fx_model, inputs_dict)
def test_encoder_decoder_model_from_pretrained_configs(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_from_pretrained_configs(**input_ids_dict)
def test_encoder_decoder_model_from_pretrained(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_from_pretrained(**input_ids_dict, return_dict=False)
def test_encoder_decoder_model_from_pretrained_return_dict(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_from_pretrained(**input_ids_dict, return_dict=True)
def test_save_and_load_from_pretrained(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_save_and_load(**input_ids_dict)
def test_encoder_decoder_model_from_encoder_decoder_pretrained(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_from_encoder_decoder_pretrained(**input_ids_dict)
def test_encoder_decoder_model_output_attentions(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_output_attentions(**input_ids_dict)
def test_freeze_feature_encoder(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_freeze_feature_encoder(**input_ids_dict)
def test_encoder_decoder_model_generate(self):
input_ids_dict = self.prepare_config_and_inputs()
self.check_encoder_decoder_model_generate(**input_ids_dict)
def assert_almost_equals(self, a: np.ndarray, b: np.ndarray, tol: float):
diff = np.abs((a - b)).max()
self.assertLessEqual(diff, tol, f"Difference between torch and flax is {diff} (>= {tol}).")
@is_pt_flax_cross_test
def test_pt_flax_equivalence(self):
config_inputs_dict = self.prepare_config_and_inputs()
config = config_inputs_dict.pop("config")
decoder_config = config_inputs_dict.pop("decoder_config")
inputs_dict = config_inputs_dict
# `encoder_hidden_states` is not used in model call/forward
del inputs_dict["encoder_hidden_states"]
# Avoid the case where a sequence has no place to attend (after combined with the causal attention mask)
batch_size = inputs_dict["decoder_attention_mask"].shape[0]
inputs_dict["decoder_attention_mask"] = np.concatenate(
[np.ones(shape=(batch_size, 1)), inputs_dict["decoder_attention_mask"][:, 1:]], axis=1
)
# Flax models don't use the `use_cache` option and cache is not returned as a default.
# So we disable `use_cache` here for PyTorch model.
decoder_config.use_cache = False
self.assertTrue(decoder_config.cross_attention_hidden_size is None)
# check without `enc_to_dec_proj` projection
decoder_config.hidden_size = config.hidden_size
self.assertTrue(config.hidden_size == decoder_config.hidden_size)
self.check_equivalence_pt_to_flax(config, decoder_config, inputs_dict)
self.check_equivalence_flax_to_pt(config, decoder_config, inputs_dict)
# check `enc_to_dec_proj` work as expected
decoder_config.hidden_size = decoder_config.hidden_size * 2
self.assertTrue(config.hidden_size != decoder_config.hidden_size)
self.check_equivalence_pt_to_flax(config, decoder_config, inputs_dict)
self.check_equivalence_flax_to_pt(config, decoder_config, inputs_dict)
# check `add_adapter` works as expected
config.add_adapter = True
self.assertTrue(config.add_adapter)
self.check_equivalence_pt_to_flax(config, decoder_config, inputs_dict)
self.check_equivalence_flax_to_pt(config, decoder_config, inputs_dict)
@slow
def test_real_model_save_load_from_pretrained(self):
model_2 = self.get_pretrained_model()
inputs = ids_tensor([13, 5], model_2.config.encoder.vocab_size)
decoder_input_ids = ids_tensor([13, 1], model_2.config.decoder.vocab_size)
attention_mask = ids_tensor([13, 5], vocab_size=2)
outputs = model_2(
inputs=inputs,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
)
out_2 = np.array(outputs[0])
out_2[np.isnan(out_2)] = 0
with tempfile.TemporaryDirectory() as tmp_dirname:
model_2.save_pretrained(tmp_dirname)
model_1 = FlaxSpeechEncoderDecoderModel.from_pretrained(tmp_dirname)
after_outputs = model_1(
inputs=inputs,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
)
out_1 = np.array(after_outputs[0])
out_1[np.isnan(out_1)] = 0
max_diff = np.amax(np.abs(out_1 - out_2))
self.assertLessEqual(max_diff, 4e-2)
@require_flax
class FlaxWav2Vec2GPT2ModelTest(FlaxEncoderDecoderMixin, unittest.TestCase):
def get_pretrained_model_and_inputs(self):
model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(
"facebook/wav2vec2-large-lv60", "gpt2-medium"
)
batch_size = 13
input_values = floats_tensor([batch_size, 512], scale=1.0)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
return model, inputs
def get_encoder_decoder_model(self, config, decoder_config):
encoder_model = FlaxWav2Vec2Model(config)
decoder_model = FlaxGPT2LMHeadModel(decoder_config)
return encoder_model, decoder_model
def prepare_config_and_inputs(self):
model_tester_encoder = FlaxWav2Vec2ModelTester(self, batch_size=13)
model_tester_decoder = FlaxGPT2ModelTester(self, batch_size=13)
encoder_config_and_inputs = model_tester_encoder.prepare_config_and_inputs()
decoder_config_and_inputs = model_tester_decoder.prepare_config_and_inputs_for_decoder()
(config, inputs, attention_mask) = encoder_config_and_inputs
(
decoder_config,
decoder_input_ids,
decoder_attention_mask,
encoder_hidden_states,
encoder_attention_mask,
) = decoder_config_and_inputs
# make sure that cross attention layers are added
decoder_config.add_cross_attention = True
return {
"config": config,
"inputs": inputs,
"attention_mask": attention_mask,
"decoder_config": decoder_config,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
"encoder_hidden_states": encoder_hidden_states,
}
@slow
def test_flaxwav2vec2gpt2_pt_flax_equivalence(self):
pt_model = SpeechEncoderDecoderModel.from_pretrained("jsnfly/wav2vec2-large-xlsr-53-german-gpt2")
fx_model = FlaxSpeechEncoderDecoderModel.from_pretrained(
"jsnfly/wav2vec2-large-xlsr-53-german-gpt2", from_pt=True
)
pt_model.to(torch_device)
pt_model.eval()
# prepare inputs
batch_size = 13
input_values = floats_tensor([batch_size, 512], scale=1.0)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], fx_model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs_dict = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
flax_inputs = inputs_dict
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in flax_inputs.items()}
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs)
pt_logits = pt_outputs.logits
pt_outputs = pt_outputs.to_tuple()
fx_outputs = fx_model(**inputs_dict)
fx_logits = fx_outputs.logits
fx_outputs = fx_outputs.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits.numpy(), 4e-2)
# PT -> Flax
with tempfile.TemporaryDirectory() as tmpdirname:
pt_model.save_pretrained(tmpdirname)
fx_model_loaded = FlaxSpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_pt=True)
fx_outputs_loaded = fx_model_loaded(**inputs_dict)
fx_logits_loaded = fx_outputs_loaded.logits
fx_outputs_loaded = fx_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs_loaded), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits_loaded, pt_logits.numpy(), 4e-2)
# Flax -> PT
with tempfile.TemporaryDirectory() as tmpdirname:
fx_model.save_pretrained(tmpdirname)
pt_model_loaded = SpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_flax=True)
pt_model_loaded.to(torch_device)
pt_model_loaded.eval()
with torch.no_grad():
pt_outputs_loaded = pt_model_loaded(**pt_inputs)
pt_logits_loaded = pt_outputs_loaded.logits
pt_outputs_loaded = pt_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs_loaded), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits_loaded.numpy(), 4e-2)
@require_flax
class FlaxWav2Vec2BartModelTest(FlaxEncoderDecoderMixin, unittest.TestCase):
def get_pretrained_model_and_inputs(self):
model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(
"facebook/wav2vec2-large-lv60", "bart-large"
)
batch_size = 13
input_values = floats_tensor([batch_size, 512], scale=1.0)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
return model, inputs
def get_encoder_decoder_model(self, config, decoder_config):
encoder_model = FlaxWav2Vec2Model(config)
decoder_model = FlaxBartForCausalLM(decoder_config)
return encoder_model, decoder_model
def prepare_config_and_inputs(self):
model_tester_encoder = FlaxWav2Vec2ModelTester(self, batch_size=13)
model_tester_decoder = FlaxBartStandaloneDecoderModelTester(self, batch_size=13)
encoder_config_and_inputs = model_tester_encoder.prepare_config_and_inputs()
decoder_config_and_inputs = model_tester_decoder.prepare_config_and_inputs_for_decoder()
(config, inputs, attention_mask) = encoder_config_and_inputs
(
decoder_config,
decoder_input_ids,
decoder_attention_mask,
encoder_hidden_states,
encoder_attention_mask,
) = decoder_config_and_inputs
# make sure that cross attention layers are added
decoder_config.add_cross_attention = True
return {
"config": config,
"inputs": inputs,
"attention_mask": attention_mask,
"decoder_config": decoder_config,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
"encoder_hidden_states": encoder_hidden_states,
}
@slow
def test_flaxwav2vec2bart_pt_flax_equivalence(self):
pt_model = SpeechEncoderDecoderModel.from_pretrained("patrickvonplaten/wav2vec2-2-bart-large")
fx_model = FlaxSpeechEncoderDecoderModel.from_pretrained(
"patrickvonplaten/wav2vec2-2-bart-large", from_pt=True
)
pt_model.to(torch_device)
pt_model.eval()
# prepare inputs
batch_size = 13
input_values = floats_tensor([batch_size, 512], scale=1.0)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], fx_model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs_dict = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
flax_inputs = inputs_dict
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in flax_inputs.items()}
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs)
pt_logits = pt_outputs.logits
pt_outputs = pt_outputs.to_tuple()
fx_outputs = fx_model(**inputs_dict)
fx_logits = fx_outputs.logits
fx_outputs = fx_outputs.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits.numpy(), 4e-2)
# PT -> Flax
with tempfile.TemporaryDirectory() as tmpdirname:
pt_model.save_pretrained(tmpdirname)
fx_model_loaded = FlaxSpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_pt=True)
fx_outputs_loaded = fx_model_loaded(**inputs_dict)
fx_logits_loaded = fx_outputs_loaded.logits
fx_outputs_loaded = fx_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs_loaded), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits_loaded, pt_logits.numpy(), 4e-2)
# Flax -> PT
with tempfile.TemporaryDirectory() as tmpdirname:
fx_model.save_pretrained(tmpdirname)
pt_model_loaded = SpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_flax=True)
pt_model_loaded.to(torch_device)
pt_model_loaded.eval()
with torch.no_grad():
pt_outputs_loaded = pt_model_loaded(**pt_inputs)
pt_logits_loaded = pt_outputs_loaded.logits
pt_outputs_loaded = pt_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs_loaded), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits_loaded.numpy(), 4e-2)
@require_flax
class FlaxWav2Vec2BertModelTest(FlaxEncoderDecoderMixin, unittest.TestCase):
def get_pretrained_model_and_inputs(self):
model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(
"facebook/wav2vec2-large-lv60", "bert-large-uncased"
)
batch_size = 13
input_values = floats_tensor([batch_size, 512], model.config.encoder.vocab_size)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
return model, inputs
def get_encoder_decoder_model(self, config, decoder_config):
encoder_model = FlaxWav2Vec2Model(config)
decoder_model = FlaxBertForCausalLM(decoder_config)
return encoder_model, decoder_model
def prepare_config_and_inputs(self):
model_tester_encoder = FlaxWav2Vec2ModelTester(self, batch_size=13)
model_tester_decoder = FlaxBertModelTester(self, batch_size=13)
encoder_config_and_inputs = model_tester_encoder.prepare_config_and_inputs()
decoder_config_and_inputs = model_tester_decoder.prepare_config_and_inputs_for_decoder()
(config, inputs, attention_mask) = encoder_config_and_inputs
(
decoder_config,
decoder_input_ids,
decoder_attention_mask,
encoder_hidden_states,
encoder_attention_mask,
) = decoder_config_and_inputs
# make sure that cross attention layers are added
decoder_config.add_cross_attention = True
return {
"config": config,
"inputs": inputs,
"attention_mask": attention_mask,
"decoder_config": decoder_config,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
"encoder_hidden_states": encoder_hidden_states,
}
@slow
def test_flaxwav2vec2bert_pt_flax_equivalence(self):
pt_model = SpeechEncoderDecoderModel.from_pretrained("speech-seq2seq/wav2vec2-2-bert-large")
fx_model = FlaxSpeechEncoderDecoderModel.from_pretrained("speech-seq2seq/wav2vec2-2-bert-large", from_pt=True)
pt_model.to(torch_device)
pt_model.eval()
# prepare inputs
batch_size = 13
input_values = floats_tensor([batch_size, 512], fx_model.config.encoder.vocab_size)
attention_mask = random_attention_mask([batch_size, 512])
decoder_input_ids = ids_tensor([batch_size, 4], fx_model.config.decoder.vocab_size)
decoder_attention_mask = random_attention_mask([batch_size, 4])
inputs_dict = {
"inputs": input_values,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
}
flax_inputs = inputs_dict
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in flax_inputs.items()}
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs)
pt_logits = pt_outputs.logits
pt_outputs = pt_outputs.to_tuple()
fx_outputs = fx_model(**inputs_dict)
fx_logits = fx_outputs.logits
fx_outputs = fx_outputs.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits.numpy(), 4e-2)
# PT -> Flax
with tempfile.TemporaryDirectory() as tmpdirname:
pt_model.save_pretrained(tmpdirname)
fx_model_loaded = FlaxSpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_pt=True)
fx_outputs_loaded = fx_model_loaded(**inputs_dict)
fx_logits_loaded = fx_outputs_loaded.logits
fx_outputs_loaded = fx_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs_loaded), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits_loaded, pt_logits.numpy(), 4e-2)
# Flax -> PT
with tempfile.TemporaryDirectory() as tmpdirname:
fx_model.save_pretrained(tmpdirname)
pt_model_loaded = SpeechEncoderDecoderModel.from_pretrained(tmpdirname, from_flax=True)
pt_model_loaded.to(torch_device)
pt_model_loaded.eval()
with torch.no_grad():
pt_outputs_loaded = pt_model_loaded(**pt_inputs)
pt_logits_loaded = pt_outputs_loaded.logits
pt_outputs_loaded = pt_outputs_loaded.to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs_loaded), "Output lengths differ between Flax and PyTorch")
self.assert_almost_equals(fx_logits, pt_logits_loaded.numpy(), 4e-2)
|
securityheaders/models/xframeoptions/__init__.py
|
th3cyb3rc0p/securityheaders
| 151 |
137375
|
from .xframeoptionsdirective import XFrameOptionsDirective
from .xframeoptions import XFrameOptions
__all__ = ['XFrameOptionsDirective','XFrameOptions']
|
AppServer/lib/django-1.4/tests/modeltests/validation/test_error_messages.py
|
loftwah/appscale
| 790 |
137377
|
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.unittest import TestCase
class ValidationMessagesTest(TestCase):
def test_autofield_field_raises_error_message(self):
f = models.AutoField(primary_key=True)
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [u"'foo' value must be an integer."])
# primary_key must be True. Refs #12467.
self.assertRaises(AssertionError, models.AutoField, 'primary_key', False)
try:
models.AutoField(primary_key=False)
except AssertionError, e:
self.assertEqual(str(e), "AutoFields must have primary_key=True.")
def test_integer_field_raises_error_message(self):
f = models.IntegerField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [u"'foo' value must be an integer."])
def test_boolean_field_raises_error_message(self):
f = models.BooleanField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages,
[u"'foo' value must be either True or False."])
def test_float_field_raises_error_message(self):
f = models.FloatField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [u"'foo' value must be a float."])
def test_decimal_field_raises_error_message(self):
f = models.DecimalField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages,
[u"'foo' value must be a decimal number."])
def test_null_boolean_field_raises_error_message(self):
f = models.NullBooleanField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages,
[u"'foo' value must be either None, True or False."])
def test_date_field_raises_error_message(self):
f = models.DateField()
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'foo' value has an invalid date format. "
u"It must be in YYYY-MM-DD format."])
self.assertRaises(ValidationError, f.clean, 'aaaa-10-10', None)
try:
f.clean('aaaa-10-10', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'aaaa-10-10' value has an invalid date format. "
u"It must be in YYYY-MM-DD format."])
self.assertRaises(ValidationError, f.clean, '2011-13-10', None)
try:
f.clean('2011-13-10', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'2011-13-10' value has the correct format (YYYY-MM-DD) "
u"but it is an invalid date."])
self.assertRaises(ValidationError, f.clean, '2011-10-32', None)
try:
f.clean('2011-10-32', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'2011-10-32' value has the correct format (YYYY-MM-DD) "
u"but it is an invalid date."])
def test_datetime_field_raises_error_message(self):
f = models.DateTimeField()
# Wrong format
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'foo' value has an invalid format. It must be "
u"in YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."])
# Correct format but invalid date
self.assertRaises(ValidationError, f.clean, '2011-10-32', None)
try:
f.clean('2011-10-32', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'2011-10-32' value has the correct format "
u"(YYYY-MM-DD) but it is an invalid date."])
# Correct format but invalid date/time
self.assertRaises(ValidationError, f.clean, '2011-10-32 10:10', None)
try:
f.clean('2011-10-32 10:10', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'2011-10-32 10:10' value has the correct format "
u"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
u"but it is an invalid date/time."])
def test_time_field_raises_error_message(self):
f = models.TimeField()
# Wrong format
self.assertRaises(ValidationError, f.clean, 'foo', None)
try:
f.clean('foo', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'foo' value has an invalid format. It must be in "
u"HH:MM[:ss[.uuuuuu]] format."])
# Correct format but invalid time
self.assertRaises(ValidationError, f.clean, '25:50', None)
try:
f.clean('25:50', None)
except ValidationError, e:
self.assertEqual(e.messages, [
u"'25:50' value has the correct format "
u"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."])
|
tests/emukit/quadrature/test_warpings.py
|
EmuKit/Emukit
| 272 |
137388
|
from collections import namedtuple
import numpy as np
import pytest
from numpy.testing import assert_allclose
from pytest_lazyfixture import lazy_fixture
from emukit.quadrature.methods.warpings import IdentityWarping, SquareRootWarping
def create_fixture_parameters():
return [pytest.param(lazy_fixture(warping.name), id=warping.name) for warping in warpings]
@pytest.fixture
def identity_warping():
return IdentityWarping()
@pytest.fixture
def squarerroot_warping():
offset = 1.0
return SquareRootWarping(offset=offset)
@pytest.fixture
def inverted_squarerroot_warping():
offset = 1.0
return SquareRootWarping(offset=offset, is_inverted=True)
warpings_tuple = namedtuple("WarpingTest", ["name"])
warpings = [
warpings_tuple("identity_warping"),
warpings_tuple("squarerroot_warping"),
warpings_tuple("inverted_squarerroot_warping"),
]
RTOL = 1e-8
ATOL = 1e-6
@pytest.mark.parametrize("warping", create_fixture_parameters())
def test_warping_shapes(warping):
Y = np.ones([5, 1])
assert warping.transform(Y).shape == Y.shape
assert warping.inverse_transform(Y).shape == Y.shape
@pytest.mark.parametrize("warping", create_fixture_parameters())
def test_warping_values(warping):
np.random.seed(42)
Y = np.random.rand(5, 1)
assert_allclose(warping.inverse_transform(warping.transform(Y)), Y, rtol=RTOL, atol=ATOL)
def test_squarerroot_warping_update_parameters(squarerroot_warping, inverted_squarerroot_warping):
new_offset = 10.0
squarerroot_warping.update_parameters(offset=new_offset)
assert squarerroot_warping.offset == new_offset
inverted_squarerroot_warping.update_parameters(offset=new_offset)
assert inverted_squarerroot_warping.offset == new_offset
def test_squarerroot_warping_inverted_flag(squarerroot_warping, inverted_squarerroot_warping):
assert not squarerroot_warping.is_inverted
assert inverted_squarerroot_warping.is_inverted
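# Editor's note (illustrative sketch, not part of the original EmuKit tests):
# a new warping is exercised by the shape/round-trip tests above by defining a
# fixture for it and registering the fixture name in `warpings` before the
# parametrized tests are collected; `MyNewWarping` / `my_new_warping` are
# hypothetical names.
#
#   @pytest.fixture
#   def my_new_warping():
#       return MyNewWarping()
#
#   warpings.append(warpings_tuple("my_new_warping"))  # must appear before the tests above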
|
tests/models/archival_test.py
|
MSLars/allennlp
| 11,433 |
137411
|
import copy
import os
import tempfile
import tarfile
import pytest
import torch
from allennlp.version import _MAJOR, _MINOR
from allennlp.commands.train import train_model
from allennlp.common import Params
from allennlp.common.meta import Meta
from allennlp.common.checks import ConfigurationError
from allennlp.common.testing import AllenNlpTestCase
from allennlp.data.dataset_readers import DatasetReader
from allennlp.models.archival import (
archive_model,
load_archive,
CONFIG_NAME,
_check_version_compatibility,
)
def assert_models_equal(model, model2):
# check that model weights are the same
keys = set(model.state_dict().keys())
keys2 = set(model2.state_dict().keys())
assert keys == keys2
for key in keys:
assert torch.equal(model.state_dict()[key], model2.state_dict()[key])
# check that vocabularies are the same
vocab = model.vocab
vocab2 = model2.vocab
assert vocab._token_to_index == vocab2._token_to_index
assert vocab._index_to_token == vocab2._index_to_token
def _test_check_version_compatibility():
meta = Meta(version=f"{_MAJOR}.{int(_MINOR) + 1}.0")
with pytest.warns(UserWarning, match="trained on a newer version"):
_check_version_compatibility("model.tar.gz", meta)
meta = Meta(version="1.2.0")
with pytest.warns(UserWarning, match="trained on version"):
_check_version_compatibility("model.tar.gz", meta)
class ArchivalTest(AllenNlpTestCase):
def setup_method(self):
super().setup_method()
self.params = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
"validation_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam", "cuda_device": -1},
}
)
def test_archiving(self):
# copy params, since they'll get consumed during training
params_copy = self.params.duplicate()
params_dict_copy = copy.deepcopy(self.params.as_dict())
# `train_model` should create an archive
serialization_dir = self.TEST_DIR / "archive_test"
model = train_model(self.params, serialization_dir=serialization_dir)
archive_path = serialization_dir / "model.tar.gz"
# load from the archive
archive = load_archive(archive_path)
model2 = archive.model
assert_models_equal(model, model2)
assert isinstance(
archive.dataset_reader,
type(DatasetReader.from_params(params_copy["dataset_reader"].duplicate())),
)
assert isinstance(
archive.validation_dataset_reader,
type(DatasetReader.from_params(params_copy["dataset_reader"].duplicate())),
) # validation_dataset_reader is not in the config, so fall back to dataset_reader
# check that params are the same
params2 = archive.config
assert params2.as_dict() == params_dict_copy
def test_archive_model_uses_archive_path(self):
serialization_dir = self.TEST_DIR / "serialization"
# Train a model
train_model(self.params, serialization_dir=serialization_dir)
# Use a new path.
archive_model(
serialization_dir=serialization_dir, archive_path=serialization_dir / "new_path.tar.gz"
)
archive = load_archive(serialization_dir / "new_path.tar.gz")
assert archive
def test_loading_serialization_directory(self):
# copy params, since they'll get consumed during training
params_dict_copy = copy.deepcopy(self.params.as_dict())
# `train_model` should create an archive
serialization_dir = self.TEST_DIR / "serialization"
model = train_model(self.params, serialization_dir=serialization_dir)
# load from the serialization directory itself
archive = load_archive(serialization_dir)
model2 = archive.model
assert_models_equal(model, model2)
# check that params are the same
params2 = archive.config
assert params2.as_dict() == params_dict_copy
def test_can_load_from_archive_model(self):
serialization_dir = self.FIXTURES_ROOT / "basic_classifier" / "from_archive_serialization"
archive_path = serialization_dir / "model.tar.gz"
model = load_archive(archive_path).model
        # We want to be sure not only that we don't crash, but also that we loaded the right
        # weights for the model. We'll do that by making sure that we didn't just load the model
        # that's in the `archive_path` of the config file, which is this one.
base_model_path = self.FIXTURES_ROOT / "basic_classifier" / "serialization" / "model.tar.gz"
base_model = load_archive(base_model_path).model
base_model_params = dict(base_model.named_parameters())
for name, parameters in model.named_parameters():
if parameters.size() == base_model_params[name].size():
assert not (parameters == base_model_params[name]).all()
else:
# In this case, the parameters are definitely different, no need for the above
# check.
pass
def test_include_in_archive(self):
self.params["include_in_archive"] = ["metrics_epoch_*.json"]
serialization_dir = self.TEST_DIR / "serialization"
# Train a model
train_model(self.params, serialization_dir=serialization_dir)
# Assert that the additional targets were archived
with tempfile.TemporaryDirectory() as tempdir:
with tarfile.open(serialization_dir / "model.tar.gz", "r:gz") as archive:
archive.extractall(tempdir)
assert os.path.isfile(os.path.join(tempdir, "metrics_epoch_0.json"))
assert os.path.isfile(os.path.join(tempdir, "metrics_epoch_1.json"))
assert not os.path.isfile(os.path.join(tempdir, "metrics.json"))
def test_invalid_include_in_archive(self):
self.params["include_in_archive"] = [CONFIG_NAME]
serialization_dir = self.TEST_DIR / "serialization"
with pytest.raises(ConfigurationError) as exc:
train_model(self.params, serialization_dir=serialization_dir)
assert "are saved names and cannot be used" in str(exc.value)
|
meshpy/meshpy/image_converter.py
|
huikul/Dexterous_grasp
| 193 |
137429
|
<filename>meshpy/meshpy/image_converter.py
"""
Classes to convert binary images to extruded meshes
Author: <NAME>
"""
import IPython
import logging
import numpy as np
import os
from PIL import Image, ImageDraw
import sklearn.decomposition
import sys
import matplotlib.pyplot as plt
import skimage.morphology as morph
from skimage.transform import resize
from autolab_core import RigidTransform
from meshpy import Mesh3D
from autolab_core import BinaryImage
class ImageToMeshConverter:
""" Namespace class for converting binary images to SDFs and meshes. """
@staticmethod
def binary_image_to_mesh(binary_im, extrusion=1000, scale_factor=1.0):
"""
Converts a binary image to a 3D extruded polygonal mesh
Parameters
----------
binary_im : :obj:`perception.BinaryImage`
binary image for silhouette
extrusion : float
amount to extrude the polygon in meters
scale_factor : float
amount to rescale the final mesh (from units of pixels to meters)
Returns
-------
:obj:`Mesh3D`
the resulting mesh
Raises
------
:obj:`ValueError`
if the triangulation was not successful due to topology or other factors
"""
# check valid input
if not isinstance(binary_im, BinaryImage):
raise ValueError('Must provide perception.BinaryImage as input')
# get occupied indices from binary image
binary_data = binary_im.data
occ_coords = binary_im.nonzero_pixels()
# create mesh faces and concatenate
front_face_depth = extrusion / 2.0
back_face_depth = -extrusion / 2.0
front_verts, front_tris, front_ind_map = ImageToMeshConverter.create_mesh_face(occ_coords, front_face_depth,
binary_data.shape, cw=True)
back_verts, back_tris, back_ind_map = ImageToMeshConverter.create_mesh_face(occ_coords, back_face_depth,
binary_data.shape, cw=False)
verts, tris = ImageToMeshConverter.join_vert_tri_lists(front_verts, front_tris, back_verts, back_tris)
num_verts = len(front_verts)
back_ind_map = back_ind_map + num_verts
# connect boundaries
boundary_im = binary_im.boundary_map()
ImageToMeshConverter.add_boundary_tris(boundary_im, verts, tris, front_ind_map, back_ind_map)
# convert to mesh and clean
m = Mesh3D(verts, tris)
m.remove_unreferenced_vertices()
T_im_world = RigidTransform(rotation=np.array([[0, 1, 0],
[-1, 0, 0],
[0, 0, 1]]),
from_frame='obj',
to_frame='obj')
m = m.transform(T_im_world)
m.rescale_dimension(scale_factor, Mesh3D.ScalingTypeRelative)
return m
@staticmethod
def join_vert_tri_lists(verts1, tris1, verts2, tris2):
"""
Concatenates two lists of vertices and triangles.
Parameters
----------
verts1 : :obj:`list` of 3-:obj:`list` of float
first list of vertices
        tris1 : :obj:`list` of 3-:obj:`list` of int
first list of triangles
verts2 : :obj:`list` of 3-:obj:`list` of float
second list of vertices
        tris2 : :obj:`list` of 3-:obj:`list` of int
second list of triangles
Returns
-------
verts : :obj:`list` of 3-:obj:`list` of float
joined list of vertices
        tris : :obj:`list` of 3-:obj:`list` of int
joined list of triangles
"""
num_verts1 = len(verts1)
# simple append for verts
verts = list(verts1)
verts.extend(verts2)
# offset and append triangle (vertex indices)
tris = list(tris1)
tris2_offset = [[num_verts1 + t[0], num_verts1 + t[1], num_verts1 + t[2]] for t in tris2]
tris.extend(tris2_offset)
return verts, tris
@staticmethod
def add_boundary_tris(boundary_im, verts, tris, front_ind_map, back_ind_map):
"""
Connects front and back faces along the boundary, modifying tris IN PLACE
        NOTE: Right now this only works for points topologically equivalent to a sphere, e.g. no holes!
This can be extended by parsing back over untriangulated boundary points.
Parameters
----------
boundary_im : :obj:`perception.BinaryImage`
binary image of the boundary
verts : :obj:`list` of 3-:obj:`list` of float
list of vertices
        tris : :obj:`list` of 3-:obj:`list` of int
list of triangles
front_ind_map : :obj:`numpy.ndarray`
maps vertex coords to the indices of their front face vertex in list
back_ind_map : :obj:`numpy.ndarray`
maps vertex coords to the indices of their back face vertex in list
Raises
------
:obj:`ValueError`
triangulation failed
"""
# TODO: fix multiple connected comps
# setup variables for boundary coords
upper_bound = np.iinfo(np.uint8).max
remaining_boundary = boundary_im.data.copy()
boundary_ind = np.where(remaining_boundary == upper_bound)
boundary_coords = list(zip(boundary_ind[0], boundary_ind[1]))
if len(boundary_coords) == 0:
raise ValueError('No boundary coordinates')
        # set up initial vars
tris_arr = np.array(tris)
visited_map = np.zeros(boundary_im.shape)
another_visit_avail = True
# make sure to start with a reffed tri
visited_marker = 128
finished = False
it = 0
i = 0
coord_visits = []
while not finished:
finished = True
logging.info('Boundary triangulation iter %d' %(it))
reffed = False
while not reffed and i < len(boundary_coords):
cur_coord = boundary_coords[i]
if visited_map[cur_coord[0], cur_coord[1]] == 0:
visited_map[cur_coord[0], cur_coord[1]] = 1
front_ind = front_ind_map[cur_coord[0], cur_coord[1]]
back_ind = back_ind_map[cur_coord[0], cur_coord[1]]
ref_tris = np.where(tris_arr == front_ind)
ref_tris = ref_tris[0]
reffed = (ref_tris.shape[0] > 0)
remaining_boundary[cur_coord[0], cur_coord[1]] = visited_marker
i = i+1
coord_visits.extend([cur_coord])
cur_dir_angle = np.pi / 2 # start straight down
# loop around boundary and add faces connecting front and back
while another_visit_avail:
front_ind = front_ind_map[cur_coord[0], cur_coord[1]]
back_ind = back_ind_map[cur_coord[0], cur_coord[1]]
ref_tris = np.where(tris_arr == front_ind)
ref_tris = ref_tris[0]
num_reffing_tris = ref_tris.shape[0]
                # get all possible candidates from neighboring tris
another_visit_avail = False
candidate_next_coords = []
for i in range(num_reffing_tris):
reffing_tri = tris[ref_tris[i]]
for j in range(3):
v = verts[reffing_tri[j]]
if boundary_im[v[0], v[1]] == upper_bound and visited_map[v[0], v[1]] == 0:
candidate_next_coords.append([v[0], v[1]])
another_visit_avail = True
# get the "rightmost" next point
num_candidates = len(candidate_next_coords)
if num_candidates > 0:
# calculate candidate directions
directions = []
next_dirs = np.array(candidate_next_coords) - np.array(cur_coord)
dir_norms = np.linalg.norm(next_dirs, axis = 1)
next_dirs = next_dirs / np.tile(dir_norms, [2, 1]).T
# calculate angles relative to positive x axis
new_angles = np.arctan(next_dirs[:,0] / next_dirs[:,1])
negative_ind = np.where(next_dirs[:,1] < 0)
negative_ind = negative_ind[0]
new_angles[negative_ind] = new_angles[negative_ind] + np.pi
# compute difference in angles
angle_diff = new_angles - cur_dir_angle
correction_ind = np.where(angle_diff <= -np.pi)
correction_ind = correction_ind[0]
angle_diff[correction_ind] = angle_diff[correction_ind] + 2 * np.pi
# choose the next coordinate with the maximum angle diff (rightmost)
next_ind = np.where(angle_diff == np.max(angle_diff))
next_ind = next_ind[0]
cur_coord = candidate_next_coords[next_ind[0]]
cur_dir_angle = new_angles[next_ind[0]]
# add triangles (only add if there is a new candidate)
next_front_ind = front_ind_map[cur_coord[0], cur_coord[1]]
next_back_ind = back_ind_map[cur_coord[0], cur_coord[1]]
tris.append([int(front_ind), int(back_ind), int(next_front_ind)])
tris.append([int(back_ind), int(next_back_ind), int(next_front_ind)])
# mark coordinate as visited
visited_map[cur_coord[0], cur_coord[1]] = 1
coord_visits.append(cur_coord)
remaining_boundary[cur_coord[0], cur_coord[1]] = visited_marker
# add edge back to first coord
cur_coord = coord_visits[0]
next_front_ind = front_ind_map[cur_coord[0], cur_coord[1]]
next_back_ind = back_ind_map[cur_coord[0], cur_coord[1]]
tris.append([int(front_ind), int(back_ind), int(next_front_ind)])
tris.append([int(back_ind), int(next_back_ind), int(next_front_ind)])
# check success
finished = (np.sum(remaining_boundary == upper_bound) == 0) or (i == len(boundary_coords))
it += 1
@staticmethod
def create_mesh_face(occ_coords, depth, index_shape, cw=True):
"""
Creates a 2D mesh face of vertices and triangles from the given coordinates at a specified depth.
Parameters
----------
        occ_coords : :obj:`list` of 3-:obj:`tuple`
the coordinates of vertices
depth : float
the depth at which to place the face
index_shape : 2-:obj:`tuple`
the shape of the numpy grid on which the vertices lie
cw : bool
clockwise or counterclockwise orientation
Returns
-------
verts : :obj:`list` of 3-:obj:`list` of float
list of vertices
        tris : :obj:`list` of 3-:obj:`list` of int
list of triangles
"""
# get mesh vertices
verts = []
tris = []
ind_map = -1 * np.ones(index_shape) # map vertices to indices in vert list
for coord in occ_coords:
verts.append([coord[0], coord[1], depth])
ind_map[coord[0], coord[1]] = len(verts) - 1
# get mesh triangles
# rule: vertex adds triangles that it is the 90 degree corner of
for coord in occ_coords:
coord_right = [coord[0] + 1, coord[1]]
coord_left = [coord[0] - 1, coord[1]]
coord_below = [coord[0], coord[1] + 1]
coord_above = [coord[0], coord[1] - 1]
cur_ind = ind_map[coord[0], coord[1]]
# add tri above left
if coord_left[0] >= 0 and coord_above[1] >= 0:
left_ind = ind_map[coord_left[0], coord_left[1]]
above_ind = ind_map[coord_above[0], coord_above[1]]
# check if valid vertices and add
if left_ind > -1 and above_ind > -1:
if cw:
tris.append([int(cur_ind), int(left_ind), int(above_ind)])
else:
tris.append([int(cur_ind), int(above_ind), int(left_ind)])
elif above_ind > -1:
# try to patch area
coord_left_above = [coord[0] - 1, coord[1] - 1]
if coord_left_above[0] > 0 and coord_left_above[1] > 0:
left_above_ind = ind_map[coord_left_above[0], coord_left_above[1]]
# check validity
if left_above_ind > -1:
if cw:
tris.append([int(cur_ind), int(left_above_ind), int(above_ind)])
else:
tris.append([int(cur_ind), int(above_ind), int(left_above_ind)])
# add tri below right
            if coord_right[0] < index_shape[0] and coord_below[1] < index_shape[1]:  # bounds: rows for x+1, cols for y+1
right_ind = ind_map[coord_right[0], coord_right[1]]
below_ind = ind_map[coord_below[0], coord_below[1]]
# check if valid vertices and add
if right_ind > -1 and below_ind > -1:
if cw:
tris.append([int(cur_ind), int(right_ind), int(below_ind)])
else:
tris.append([int(cur_ind), int(below_ind), int(right_ind)])
elif below_ind > -1:
# try to patch area
coord_right_below = [coord[0] + 1, coord[1] + 1]
if coord_right_below[0] < index_shape[0] and coord_right_below[1] < index_shape[1]:
right_below_ind = ind_map[coord_right_below[0], coord_right_below[1]]
# check validity
if right_below_ind > -1:
if cw:
tris.append([int(cur_ind), int(right_below_ind), int(below_ind)])
else:
tris.append([int(cur_ind), int(below_ind), int(right_below_ind)])
return verts, tris, ind_map
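# Editor's note (illustrative sketch, not part of the original module): typical
# usage feeds a binary silhouette to the static entry point above; constructing
# BinaryImage directly from a uint8 array is an assumption about the
# autolab_core API, and the import path for ImageToMeshConverter may differ.
#
#   import numpy as np
#   from autolab_core import BinaryImage
#
#   mask = np.zeros((64, 64), dtype=np.uint8)
#   mask[16:48, 16:48] = 255                    # filled square silhouette
#   binary_im = BinaryImage(mask)               # assumed constructor
#   mesh = ImageToMeshConverter.binary_image_to_mesh(
#       binary_im, extrusion=1000, scale_factor=0.001)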
|
jionlp/algorithm/summary/__init__.py
|
ji3g4m6zo6/JioNLP
| 1,063 |
137437
|
<reponame>ji3g4m6zo6/JioNLP
# -*- coding=utf-8 -*-
# library: jionlp
# author: dongrixinyu
# license: Apache License 2.0
# Email: <EMAIL>
# github: https://github.com/dongrixinyu/JioNLP
# description: Preprocessing tool for Chinese NLP
from .extract_summary import ChineseSummaryExtractor
extract_summary = ChineseSummaryExtractor()
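# Editor's note (illustrative sketch, not part of the original package): the
# module-level `extract_summary` instance above is intended to be called
# directly on Chinese text; that ChineseSummaryExtractor is callable is an
# assumption here.
#
#   from jionlp.algorithm.summary import extract_summary
#   summary = extract_summary(some_chinese_text)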
|
examples/contrib/xkcd.py
|
klorel/or-tools
| 279 |
137447
|
# Copyright 2010 <NAME> <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
xkcd problem (Knapsack) in Google CP Solver.
http://xkcd.com/287/
Some amount (or none) of each dish should be ordered to give a total
of exactly 15.05.
Compare with the following models:
* Comet: http://www.hakank.org/comet/xkcd.co
* ECLiPSe: http://www.hakank.org/eclipse/xkcd.ecl
* Gecode: http://www.hakank.org/gecode/xkcd.cpp
* Gecode/R: http://www.hakank.org/gecode_r/xkcd.rb
* MiniZinc: http://www.hakank.org/minizinc/xkcd.mzn
* Tailor: http://www.hakank.org/minizinc/xkcd.mzn
* SICStus: http://www.hakank.org/sicstus/xkcd.pl
* Zinc: http://www.hakank.org/minizinc/xkcd.zinc
This model was created by <NAME> (<EMAIL>)
Also see my other Google CP Solver models:
http://www.hakank.org/google_cp_solver/
"""
from __future__ import print_function
from ortools.constraint_solver import pywrapcp
def main():
# Create the solver.
solver = pywrapcp.Solver("xkcd knapsack")
#
# data
#
num_prices = 6
# for price and total: multiplied by 100 to be able to use integers
price = [215, 275, 335, 355, 420, 580]
total = 1505
products = [
"mixed fruit", "french fries", "side salad", "host wings",
"mozzarella sticks", "samples place"
]
# declare variables
# how many items of each dish
x = [solver.IntVar(0, 10, "x%i" % i) for i in range(num_prices)]
z = solver.IntVar(0, 1505, "z")
#
# constraints
#
solver.Add(z == solver.Sum([x[i] * price[i] for i in range(num_prices)]))
solver.Add(z == total)
#
# solution and search
#
solution = solver.Assignment()
solution.Add([x[i] for i in range(num_prices)])
solution.Add(z)
collector = solver.AllSolutionCollector(solution)
# collector = solver.FirstSolutionCollector(solution)
# search_log = solver.SearchLog(100, x[0])
solver.Solve(
solver.Phase([x[i] for i in range(num_prices)], solver.INT_VAR_SIMPLE,
solver.ASSIGN_MIN_VALUE), [collector])
num_solutions = collector.SolutionCount()
print("num_solutions: ", num_solutions)
if num_solutions > 0:
for s in range(num_solutions):
print("z:", collector.Value(s, z) / 100.0)
xval = [collector.Value(s, x[i]) for i in range(num_prices)]
print("x:", xval)
for i in range(num_prices):
if xval[i] > 0:
print(xval[i], "of", products[i], ":", price[i] / 100.0)
print()
print()
print("num_solutions:", num_solutions)
print("failures:", solver.Failures())
print("branches:", solver.Branches())
print("WallTime:", solver.WallTime())
else:
print("No solutions found")
if __name__ == "__main__":
main()
|
config_system/generator/generate.py
|
ambrop72/aprinter
| 133 |
137453
|
<filename>config_system/generator/generate.py
# Copyright (c) 2015 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import sys
import os
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../utils'))
import argparse
import json
import re
import string
import config_reader
import selection
import function_defined
import assign_func
import rich_template
import file_utils
IDENTIFIER_REGEX = '\\A[A-Za-z][A-Za-z0-9_]{0,127}\\Z'
class GenState(object):
def __init__ (self):
self._subst = {}
self._config_options = []
self._constants = []
self._platform_includes = []
self._aprinter_includes = set()
self._objects = {}
self._singleton_objects = {}
self._finalize_actions = []
self._global_code = []
self._init_calls = []
self._final_init_calls = []
self._global_resources = []
self._modules_exprs = []
self._extra_sources = []
self._need_millisecond_clock = False
self._have_hw_millisecond_clock = False
self._defines = []
self._include_dirs = []
def add_subst (self, key, val, indent=-1):
self._subst[key] = {'val':val, 'indent':indent}
def add_config (self, name, dtype, value, is_constant=False, is_complex=False):
properties = []
if is_constant:
properties.append('ConfigPropertyConstant')
properties_str = 'ConfigProperties<{}>'.format(', '.join(properties))
if dtype == 'double':
config_option_str = 'APRINTER_CONFIG_OPTION_DOUBLE({}, {}, {})'.format(name, value, properties_str)
elif is_complex:
config_option_str = 'APRINTER_CONFIG_OPTION_COMPLEX({}, {}, APRINTER_WRAP_COMPLEX_VALUE({}, {}), {})'.format(name, dtype, dtype, value, properties_str)
else:
config_option_str = 'APRINTER_CONFIG_OPTION_SIMPLE({}, {}, {}, {})'.format(name, dtype, value, properties_str)
self._config_options.append(config_option_str)
return name
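    # Editor's note (illustrative example): add_config('HeaterMaxTemp', 'double',
    # '300.0') appends APRINTER_CONFIG_OPTION_DOUBLE(HeaterMaxTemp, 300.0,
    # ConfigProperties<>) to the generated configuration options.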
def add_float_config (self, name, value, **kwargs):
return self.add_config(name, 'double', format_cpp_float(value), **kwargs)
def add_bool_config (self, name, value, **kwargs):
return self.add_config(name, 'bool', 'true' if value else 'false', **kwargs)
def add_mac_addr_config (self, name, value, **kwargs):
assert len(value) == 6
val_str = '(ConfigTypeMacAddress{{{{{}}}}})'.format(', '.join('0x{:02x}'.format(x) for x in value))
return self.add_config(name, 'ConfigTypeMacAddress', val_str, is_complex=True, **kwargs)
def add_ip_addr_config (self, name, value, **kwargs):
assert len(value) == 4
val_str = '(ConfigTypeIpAddress{{{{{}}}}})'.format(', '.join('{}'.format(x) for x in value))
return self.add_config(name, 'ConfigTypeIpAddress', val_str, is_complex=True, **kwargs)
def add_float_constant (self, name, value):
self._constants.append({'type':'using', 'name':name, 'value':'AMBRO_WRAP_DOUBLE({})'.format(format_cpp_float(value))})
return name
def add_typedef (self, name, value):
self._constants.append({'type':'using', 'name':name, 'value':value})
return name
def add_int_constant (self, dtype, name, value):
if dtype == 'int':
c_type = 'int'
c_init = str(value)
else:
m = re.match('\\A(u?)int(8|16|32|64)\\Z', dtype)
assert m
u = m.group(1)
b = m.group(2)
c_type = '{}_t'.format(dtype)
c_init = '{}INT{}_C({})'.format(u.upper(), b, value)
self._constants.append({
'type': 'static {} const'.format(c_type),
'name': name,
'value': c_init,
})
return name
def add_platform_include (self, inc_file):
self._platform_includes.append(inc_file)
def add_include (self, inc_file):
self._aprinter_includes.add(inc_file)
def add_aprinter_include (self, inc_file):
self.add_include('aprinter/'+inc_file)
def register_objects (self, kind, config, key):
if kind not in self._objects:
self._objects[kind] = {}
for obj_config in config.iter_list_config(key, max_count=20):
name = obj_config.get_string('Name')
if name in self._objects[kind]:
obj_config.path().error('Duplicate {} name'.format(kind))
self._objects[kind][name] = obj_config
def get_object (self, kind, config, key):
name = config.get_string(key)
if kind not in self._objects or name not in self._objects[kind]:
config.key_path(key).error('Nonexistent {} specified'.format(kind))
return self._objects[kind][name]
def register_singleton_object (self, kind, value):
assert kind not in self._singleton_objects
self._singleton_objects[kind] = value
return value
def get_singleton_object (self, kind, allow_none=False):
have = kind in self._singleton_objects
assert allow_none or have
return self._singleton_objects[kind] if have else None
def add_global_code (self, priority, code):
self._global_code.append({'priority':priority, 'code':code})
def add_isr (self, isr):
self.add_global_code(-1, isr)
def add_init_call (self, priority, init_call):
self._init_calls.append({'priority':priority, 'init_call':init_call})
def add_final_init_call (self, priority, init_call):
self._final_init_calls.append({'priority':priority, 'init_call':init_call})
def add_finalize_action (self, action):
self._finalize_actions.append(action)
def add_global_resource (self, priority, name, expr, context_name=None, code_before=None, code_before_program=None,
extra_program_child=None, is_fast_event_root=False, use_instance=False):
code = ''
if code_before is not None:
code += '{}\n'.format(code_before)
if use_instance:
code += 'APRINTER_MAKE_INSTANCE({}, ({}))\n'.format(name, expr.build(indent=0))
else:
code += 'using {} = {};\n'.format(name, expr.build(indent=0))
self._global_resources.append({
'priority': priority,
'name': name,
'context_name':context_name,
'code': code,
'code_before_program': code_before_program,
'extra_program_child': extra_program_child,
'is_fast_event_root': is_fast_event_root,
})
def add_module (self):
index = len(self._modules_exprs)
self._modules_exprs.append(None)
return GenPrinterModule(self, index)
def add_extra_source (self, base, path):
self._extra_sources.append({'base': base, 'path': path})
def set_need_millisecond_clock (self):
self._need_millisecond_clock = True
def set_have_hw_millisecond_clock (self):
self._have_hw_millisecond_clock = True
def add_define (self, name, value=''):
self._defines.append({'name': name, 'value': str(value)})
def add_include_dir (self, base, path):
self._include_dirs.append({'base': base, 'path': path})
def finalize (self):
for action in reversed(self._finalize_actions):
action()
for so in self._singleton_objects.itervalues():
if hasattr(so, 'finalize'):
so.finalize()
global_resources = sorted(self._global_resources, key=lambda x: x['priority'])
program_children = []
program_children.extend(gr['name'] for gr in global_resources)
program_children.extend(gr['extra_program_child'] for gr in global_resources if gr['extra_program_child'] is not None)
self.add_subst('GENERATED_WARNING', 'WARNING: This file was automatically generated!')
self.add_subst('EXTRA_CONSTANTS', ''.join('{} {} = {};\n'.format(c['type'], c['name'], c['value']) for c in self._constants))
self.add_subst('ConfigOptions', ''.join('{}\n'.format(c) for c in self._config_options))
self.add_subst('PLATFORM_INCLUDES', ''.join('#include <{}>\n'.format(inc) for inc in self._platform_includes))
self.add_subst('AprinterIncludes', ''.join('#include <{}>\n'.format(inc) for inc in sorted(self._aprinter_includes)))
self.add_subst('GlobalCode', ''.join('{}\n'.format(gc['code']) for gc in sorted(self._global_code, key=lambda x: x['priority'])))
self.add_subst('InitCalls', ''.join(' {}\n'.format(ic['init_call']) for ic in sorted(self._init_calls, key=lambda x: x['priority'])))
self.add_subst('GlobalResourceExprs', ''.join(gr['code'] for gr in global_resources))
self.add_subst('GlobalResourceContextAliases', ''.join(' using {} = ::{};\n'.format(gr['context_name'], gr['name']) for gr in global_resources if gr['context_name'] is not None))
self.add_subst('GlobalResourceProgramChildren', ',\n'.join(' {}'.format(pc_name) for pc_name in program_children))
self.add_subst('GlobalResourceInit', ''.join(' {}::init(c);\n'.format(gr['name']) for gr in global_resources))
self.add_subst('FinalInitCalls', ''.join(' {}\n'.format(ic['init_call']) for ic in sorted(self._final_init_calls, key=lambda x: x['priority'])))
self.add_subst('CodeBeforeProgram', ''.join('{}\n'.format(gr['code_before_program']) for gr in global_resources if gr['code_before_program'] is not None))
def get_subst (self):
res = {}
for (key, subst) in self._subst.iteritems():
val = subst['val']
indent = subst['indent']
res[key] = val if type(val) is str else val.build(indent)
return res
class GenPrinterModule(object):
def __init__ (self, gen, index):
self._gen = gen
self._index = index
@property
def index (self):
return self._index
def set_expr (self, expr):
self._gen._modules_exprs[self._index] = expr
class GenConfigReader(config_reader.ConfigReader):
def get_int_constant (self, key):
return str(self.get_int(key))
def get_bool_constant (self, key):
return 'true' if self.get_bool(key) else 'false'
def get_float_constant (self, key):
return format_cpp_float(self.get_float(key))
def get_identifier (self, key, validate=None):
val = self.get_string(key)
if not re.match(IDENTIFIER_REGEX, val):
self.key_path(key).error('Incorrect format.')
if validate is not None and not validate(val):
self.key_path(key).error('Custom validation failed.')
return val
def get_id_char (self, key):
val = self.get_string(key)
if val not in string.ascii_uppercase:
self.key_path(key).error('Incorrect format.')
return val
def get_mac_addr (self, key):
val = self.get_string(key)
br = '([0-9A-Fa-f]{1,2})'
mac_re = '\\A{}:{}:{}:{}:{}:{}\\Z'.format(br, br, br, br, br, br)
m = re.match(mac_re, val)
if not m:
self.key_path(key).error('Incorrect format.')
return [int(m.group(i), 16) for i in range(1, 7)]
def get_ip_addr (self, key):
val = self.get_string(key)
br = '([0-9]{1,3})'
ip_re = '\\A{}\\.{}\\.{}\\.{}\\Z'.format(br, br, br, br)
m = re.match(ip_re, val)
if not m:
self.key_path(key).error('Incorrect format A.')
ints = [int(m.group(i), 10) for i in range(1, 5)]
if any(d > 255 for d in ints):
self.key_path(key).error('Incorrect format B.')
return ints
def do_selection (self, key, sel_def):
for config in self.enter_config(key):
try:
result = sel_def.run(config.get_string('_compoundName'), config)
except selection.SelectionError:
config.path().error('Unknown choice.')
return result
def do_list (self, key, elem_cb, min_count=-1, max_count=-1):
elems = []
for (i, config) in enumerate(self.iter_list_config(key, min_count=min_count, max_count=max_count)):
elems.append(elem_cb(config, i))
return TemplateList(elems)
def do_keyed_list (self, count, elems_key, elem_key_prefix, elem_cb):
elems = []
elems_config = self.get_config(elems_key)
for i in range(count):
elem_config = elems_config.get_config('{}{}'.format(elem_key_prefix, i))
elems.append(elem_cb(elem_config, i))
return TemplateList(elems)
def do_enum (self, key, mapping):
val = self.get_string(key)
if val not in mapping:
self.key_path(key).error('Incorrect choice.')
return mapping[val]
class TemplateExpr(object):
def __init__ (self, name, args):
self._name = name
self._args = args
def append_arg(self, arg):
self._args.append(arg)
def build (self, indent):
if indent == -1 or len(self._args) == 0:
initiator = ''
separator = ', '
terminator = ''
child_indent = -1
else:
initiator = '\n' + ' ' * (indent + 1)
separator = ',' + initiator
terminator = '\n' + ' ' * indent
child_indent = indent + 1
return '{}<{}{}{}>'.format(self._name, initiator, separator.join(_build_template_arg(arg, child_indent) for arg in self._args), terminator)
def _build_template_arg (arg, indent):
if type(arg) is str or type(arg) is int or type(arg) is long:
return str(arg)
if type(arg) is bool:
return 'true' if arg else 'false'
return arg.build(indent)
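# Editor's note (illustrative example): with indent=-1 the whole expression is
# rendered on one line, e.g. TemplateExpr('Foo', ['Bar', 7, True]).build(-1)
# yields 'Foo<Bar, 7, true>'.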
class TemplateList(TemplateExpr):
def __init__ (self, args):
TemplateExpr.__init__(self, 'MakeTypeList', args)
class TemplateChar(object):
def __init__ (self, ch):
self._ch = ch
def build (self, indent):
return '\'{}\''.format(self._ch)
class TemplateLiteral(object):
def __init__ (self, str):
self._str = str
def build (self, indent):
return self._str
def format_cpp_float(value):
return '{:.17E}'.format(value).replace('INF', 'INFINITY')
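# Editor's note (illustrative example): format_cpp_float(1.5) yields
# '1.50000000000000000E+00'; infinite values are emitted as 'INFINITY' so that
# the generated C++ constant compiles with <cmath>.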
def setup_event_loop(gen):
impl_obj = gen.get_singleton_object('event_loop_impl', allow_none=True)
if impl_obj is None:
impl = 'BusyEventLoop'
impl_extra_args = []
else:
impl = impl_obj['name']
impl_extra_args = impl_obj['extra_args']
gen.add_aprinter_include('system/{}.h'.format(impl))
code_before_expr = 'struct MyLoopExtraDelay;\n'
expr = TemplateExpr('{}Arg'.format(impl), ['Context', 'Program', 'MyLoopExtraDelay'] + impl_extra_args)
fast_events = 'ObjCollect<MakeTypeList<{}>, MemberType_EventLoopFastEvents>'.format(', '.join(gr['name'] for gr in gen._global_resources if gr['is_fast_event_root']))
code_before_program = 'APRINTER_DEFINE_MEMBER_TYPE(MemberType_EventLoopFastEvents, EventLoopFastEvents)\n'
code_before_program += 'APRINTER_MAKE_INSTANCE(MyLoopExtra, ({}ExtraArg<Program, MyLoop, {}>))\n'.format(impl, fast_events)
code_before_program += 'struct MyLoopExtraDelay : public WrapType<MyLoopExtra> {};'
gen.add_global_resource(0, 'MyLoop', expr, use_instance=True, context_name='EventLoop', code_before=code_before_expr, code_before_program=code_before_program, extra_program_child='MyLoopExtra')
gen.add_final_init_call(100, 'MyLoop::run(c);')
def setup_platform(gen, config, key):
platformType = [None]
platformFlags = []
platformFlagsCXX = []
linkerScript = [None]
extraLinkFlags = []
linkerSymbols = []
extraObjCopyFlags = []
platform_sel = selection.Selection()
arm_checksum_src_file = ('aprinter', 'aprinter/net/inet_chksum_arm.S')
@platform_sel.options(['At91Sam3x8e', 'At91Sam3u4e'])
def option(platform_name, platform):
if platform_name == 'At91Sam3x8e':
arch1 = 'sam3x'
arch2 = '8'
arch3 = 'e'
usb_device = 'uotghs'
elif platform_name == 'At91Sam3u4e':
arch1 = 'sam3u'
arch2 = '4'
arch3 = 'e'
usb_device = 'udphs'
else:
assert False
platformType[0] = 'arm'
platformFlags.extend(['-mcpu=cortex-m3', '-mthumb', '-Wno-psabi'])
platformFlagsCXX.extend(['-Wno-register'])
linkerScript[0] = {'base': 'asf', 'path':
'sam/utils/linker_scripts/{0:}/{0:}{1:}/gcc/flash.ld'.format(arch1, arch2)}
extraLinkFlags.extend(['-nostartfiles', '-lm'])
archDefineName = '__{}__'.format('{}{}{}'.format(arch1, arch2, arch3).upper())
gen.add_define(archDefineName, '')
stack_size = platform.get_int('StackSize')
if not 512 <= stack_size <= 32768:
platform.key_path('StackSize').error('Value out of range.')
linkerSymbols.extend([{'name': '__stack_size__', 'value': str(stack_size)}])
heap_size = platform.get_int('HeapSize')
if not 1024 <= heap_size <= 131072:
platform.key_path('HeapSize').error('Value out of range.')
gen.add_define('HEAP_SIZE', heap_size)
gen.add_define('BOARD', platform.get_int('AsfBoardNum'))
cmsis_dir = 'sam/utils/cmsis/{}'.format(arch1)
templates_dir = cmsis_dir+'/source/templates'
asf_includes = [
cmsis_dir+'/include',
templates_dir,
"",
"sam/utils",
"sam/utils/preprocessor",
"sam/utils/header_files",
"sam/boards",
"sam/drivers/pmc",
"sam/drivers/pio",
"sam/drivers/dmac",
"sam/drivers/emac",
"sam/drivers/rstc",
"common/utils",
"common/services/usb",
"common/services/usb/udc",
"common/services/clock",
"common/services/sleepmgr",
"common/services/ioport",
"common/services/usb/class/cdc",
"common/services/usb/class/cdc/device",
"common/boards",
"thirdparty/CMSIS/Include",
]
asf_sources = [
templates_dir+'/exceptions.c',
templates_dir+'/system_{}.c'.format(arch1),
templates_dir+'/gcc/startup_{}.c'.format(arch1),
'sam/drivers/pmc/pmc.c',
'sam/drivers/pmc/sleep.c',
'sam/drivers/dmac/dmac.c',
'sam/drivers/rstc/rstc.c',
'common/services/clock/{}/sysclk.c'.format(arch1),
'common/utils/interrupt/interrupt_sam_nvic.c',
]
for include in asf_includes:
gen.add_include_dir('asf', include)
for source in asf_sources:
gen.add_extra_source('asf', source)
gen.add_extra_source('aprinter', 'aprinter/platform/newlib_common.c')
gen.add_extra_source('aprinter', 'aprinter/platform/at91sam/at91sam_support.cpp')
gen.add_include_dir('aprinter', 'aprinter/platform/at91sam')
gen.register_singleton_object('asf_usb_device', usb_device)
gen.add_platform_include('aprinter/platform/at91sam/at91sam_support.h')
gen.add_init_call(-1, 'platform_init();')
gen.register_singleton_object('checksum_src_file', arm_checksum_src_file)
@platform_sel.option('Teensy3')
def option(platform):
platformType[0] = 'arm'
platformFlags.extend(['-mcpu=cortex-m4', '-mthumb', '-msoft-float'])
linkerScript[0] = {'base': 'teensyCores', 'path': 'teensy3/mk20dx256.ld'}
extraLinkFlags.extend(['-nostartfiles', '-lm'])
gen.add_define('__MK20DX256__', '')
gen.add_define('F_CPU', 96000000)
gen.add_define('USB_SERIAL', '')
gen.add_define('APRINTER_NO_SBRK', '')
teensy_includes = [
'teensy3',
]
teensy_sources = [
'teensy3/mk20dx128.c',
'teensy3/nonstd.c',
'teensy3/usb_dev.c',
'teensy3/usb_desc.c',
'teensy3/usb_mem.c',
'teensy3/usb_serial.c',
]
for include in teensy_includes:
gen.add_include_dir('teensyCores', include)
for source in teensy_sources:
gen.add_extra_source('teensyCores', source)
gen.add_extra_source('aprinter', 'aprinter/platform/teensy3/aprinter_teensy_eeprom.c')
gen.add_extra_source('aprinter', 'aprinter/platform/newlib_common.c')
gen.add_extra_source('aprinter', 'aprinter/platform/teensy3/teensy3_support.cpp')
gen.add_platform_include('aprinter/platform/teensy3/teensy3_support.h')
gen.register_singleton_object('checksum_src_file', arm_checksum_src_file)
@platform_sel.options(['AVR ATmega2560', 'AVR ATmega1284p'])
def option(platform_name, platform):
if platform_name == 'AVR ATmega2560':
mcu = 'atmega2560'
elif platform_name == 'AVR ATmega1284p':
mcu = 'atmega1284p'
else:
assert False
platformType[0] = 'avr'
platformFlags.extend(['-mmcu={}'.format(mcu)])
extraLinkFlags.extend(['-Wl,-u,vfprintf', '-lprintf_flt'])
extraObjCopyFlags.extend(['-j', '.text', '-j', '.data'])
gen.add_platform_include('avr/io.h')
gen.add_platform_include('aprinter/platform/avr/avr_support.h')
gen.add_init_call(-3, 'sei();')
gen.add_define('F_CPU', platform.get_int('CpuFreq'))
gen.add_define('AMBROLIB_AVR', '')
@platform_sel.options(['stm32f407', 'stm32f411', 'stm32f429'])
def option(platform_name, platform):
if platform_name == 'stm32f407':
chip_flag = 'STM32F407xx'
startup_file = 'startup_stm32f407xx.s'
elif platform_name == 'stm32f411':
chip_flag = 'STM32F411xE'
startup_file = 'startup_stm32f411xe.s'
elif platform_name == 'stm32f429':
chip_flag = 'STM32F429xx'
startup_file = 'startup_stm32f429xx.s'
else:
assert False
platformType[0] = 'arm'
platformFlags.extend(['-mcpu=cortex-m4', '-mthumb', '-mfpu=fpv4-sp-d16', '-mfloat-abi=hard'])
platformFlagsCXX.extend(['-Wno-register'])
linkerScript[0] = {'base': 'aprinter', 'path': 'aprinter/platform/stm32f4/{}.ld'.format(platform_name)}
extraLinkFlags.extend(['-nostartfiles', '-lm'])
gen.add_define(chip_flag, '')
gen.add_define('USE_HAL_DRIVER', '')
gen.add_define('HSE_VALUE', platform.get_int('HSE_VALUE'))
gen.add_define('PLL_N_VALUE', platform.get_int('PLL_N_VALUE'))
gen.add_define('PLL_M_VALUE', platform.get_int('PLL_M_VALUE'))
gen.add_define('PLL_P_DIV_VALUE', platform.get_int('PLL_P_DIV_VALUE'))
gen.add_define('PLL_Q_DIV_VALUE', platform.get_int('PLL_Q_DIV_VALUE'))
gen.add_define('APB1_PRESC_DIV', platform.get_int('APB1_PRESC_DIV'))
gen.add_define('APB2_PRESC_DIV', platform.get_int('APB2_PRESC_DIV'))
heap_size = platform.get_int('HeapSize')
if not 1024 <= heap_size <= 262144:
platform.key_path('HeapSize').error('Value out of range.')
gen.add_define('HEAP_SIZE', heap_size)
cmsis_dir = 'Drivers/CMSIS/Device/ST/STM32F4xx'
templates_dir = cmsis_dir+'/Source/Templates'
hal_dir = 'Drivers/STM32F4xx_HAL_Driver'
usb_dir = 'Middlewares/ST/STM32_USB_Device_Library'
stm32cubef4_includes = [
cmsis_dir+'/Include',
'Drivers/CMSIS/Include',
hal_dir+'/Inc',
usb_dir+'/Core/Inc',
usb_dir+'/Class/CDC/Inc',
]
stm32cubef4_sources = [
templates_dir+'/system_stm32f4xx.c',
templates_dir+'/gcc/{}'.format(startup_file),
hal_dir+'/Src/stm32f4xx_hal.c',
hal_dir+'/Src/stm32f4xx_hal_cortex.c',
hal_dir+'/Src/stm32f4xx_hal_rcc.c',
hal_dir+'/Src/stm32f4xx_hal_iwdg.c',
hal_dir+'/Src/stm32f4xx_hal_gpio.c',
hal_dir+'/Src/stm32f4xx_hal_dma.c',
]
for include in stm32cubef4_includes:
gen.add_include_dir('stm32cubef4', include)
for source in stm32cubef4_sources:
gen.add_extra_source('stm32cubef4', source)
gen.add_include_dir('aprinter', 'aprinter/platform/stm32f4')
gen.add_extra_source('aprinter', 'aprinter/platform/newlib_common.c')
gen.add_extra_source('aprinter', 'aprinter/platform/stm32f4/stm32f4_support.cpp')
gen.add_platform_include('aprinter/platform/stm32f4/stm32f4_support.h')
gen.add_init_call(-1, 'platform_init();')
gen.add_final_init_call(-1, 'platform_init_final();')
gen.register_singleton_object('checksum_src_file', arm_checksum_src_file)
usb_mode = platform.get_string('UsbMode')
if usb_mode not in ('None', 'FS', 'HS', 'HS-in-FS'):
platform.key_path('UsbMode').error('Invalid UsbMode.')
gen.register_singleton_object('stm32f4_usb_mode', usb_mode)
@platform_sel.option('Linux')
def option(platform):
platformType[0] = 'linux'
extraLinkFlags.extend(['-lpthread', '-lrt', '-lm', '-lstdc++'])
gen.add_extra_source('aprinter', 'aprinter/platform/linux/linux_support.cpp')
timers_structure = get_heap_structure(gen, platform, 'TimersStructure')
gen.add_platform_include('aprinter/platform/linux/linux_support.h')
gen.add_init_call(-1, 'platform_init(argc, argv);')
gen.register_singleton_object('event_loop_impl', {
'name': 'LinuxEventLoop',
'extra_args': [timers_structure],
})
config.do_selection(key, platform_sel)
return {
'platformType': platformType[0],
'platformFlags': platformFlags,
'platformFlagsCXX': platformFlagsCXX,
'linkerScript': linkerScript[0],
'extraLinkFlags': extraLinkFlags,
'linkerSymbols': linkerSymbols,
'extraObjCopyFlags': extraObjCopyFlags,
}
def setup_debug_interface(gen, config, key):
debug_sel = selection.Selection()
@debug_sel.option('NoDebug')
def option(debug):
pass
@debug_sel.option('ArmItmDebug')
def option(debug):
stimulus_port = debug.get_int('StimulusPort')
if not 0 <= stimulus_port <= 31:
debug.key_path('StimulusPort').error('Incorrect value.')
gen.add_platform_include('aprinter/hal/generic/ArmItmDebug.h')
gen.add_aprinter_include('hal/generic/NewlibDebugWrite.h')
gen.add_global_code(0, 'using MyDebug = ArmItmDebug<Context, {}>;'.format(
stimulus_port,
))
gen.add_global_code(0, 'APRINTER_SETUP_NEWLIB_DEBUG_WRITE(MyDebug::write, Context())')
config.do_selection(key, debug_sel)
class CommonClock(object):
def __init__ (self, gen, config, clock_name, priority, clockdef_func):
self._gen = gen
self._config = config
self._clock_name = clock_name
self._my_clock = 'My{}'.format(clock_name)
self._priority = priority
self._clockdef = function_defined.FunctionDefinedClass(clockdef_func)
gen.add_aprinter_include(self._clockdef.INCLUDE)
self._timers = self._load_timers(config)
self._interrupt_timers = []
self._primary_timer = self.check_timer(config.get_string('primary_timer'), config.key_path('primary_timer'))
def _load_timers (self, config):
timers = {}
if hasattr(self._clockdef, 'handle_timer'):
for timer_config in config.iter_list_config('timers', max_count=20):
timer_id = self.check_timer(timer_config.get_string('Timer'), timer_config.key_path('Timer'))
if timer_id in timers:
timer_config.path().error('Duplicate timer specified.')
timers[timer_id] = self._clockdef.handle_timer(self._gen, timer_id, timer_config)
return timers
def check_timer (self, timer_name, path):
match = re.match(self._clockdef.TIMER_RE, timer_name)
if not match:
path.error('Incorrect timer name.')
return match.group(1)
def check_oc_unit (self, name, path):
m = re.match(self._clockdef.CHANNEL_RE, name)
if m is None:
path.error('Incorrect OC unit format.')
return {'tc':m.group(1), 'channel':m.group(2)}
def add_interrupt_timer (self, name, user, clearance, path):
it = self.check_oc_unit(name, path)
self._interrupt_timers.append(it)
clearance_name = '{}_{}_Clearance'.format(self._my_clock, name)
self._gen.add_float_constant(clearance_name, clearance)
self._gen.add_isr(self._clockdef.INTERRUPT_TIMER_ISR(it, user))
return self._clockdef.INTERRUPT_TIMER_EXPR(it, clearance_name)
def finalize (self):
auto_timers = (set(it['tc'] for it in self._interrupt_timers) | set([self._primary_timer])) - set(self._timers)
for timer_id in auto_timers:
self._timers[timer_id] = self._clockdef.TIMER_EXPR(timer_id)
if hasattr(self._clockdef, 'TIMER_ISR'):
self._gen.add_isr(self._clockdef.TIMER_ISR(self._my_clock, timer_id))
if hasattr(self._clockdef, 'CLOCK_ISR'):
clock = {'primary_timer': self._primary_timer}
self._gen.add_isr(self._clockdef.CLOCK_ISR(self._my_clock, clock))
temp_timers = set(self._timers)
temp_timers.remove(self._primary_timer)
ordered_timers = [self._primary_timer] + sorted(temp_timers)
timers_expr = TemplateList([self._timers[timer_id] for timer_id in ordered_timers])
clock_service_expr = self._clockdef.CLOCK_SERVICE(self._config)
service_code = 'using {}Service = {};'.format(self._my_clock, clock_service_expr.build(indent=0))
clock_expr = TemplateExpr('{}Service::Clock'.format(self._my_clock), ['Context', 'Program', timers_expr])
self._gen.add_global_resource(self._priority, self._my_clock, clock_expr, use_instance=True, code_before=service_code, context_name=self._clock_name)
def At91Sam3xClockDef(x):
x.INCLUDE = 'hal/at91/At91Sam3xClock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('At91Sam3xClockService', [config.get_int_constant('prescaler')])
x.TIMER_RE = '\\ATC([0-9])\\Z'
x.CHANNEL_RE = '\\ATC([0-9])([A-C])\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'At91Sam3xClockInterruptTimerService<At91Sam3xClockTC{}, At91Sam3xClockComp{}, {}>'.format(it['tc'], it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: 'AMBRO_AT91SAM3X_CLOCK_INTERRUPT_TIMER_GLOBAL(At91Sam3xClockTC{}, At91Sam3xClockComp{}, {}, Context())'.format(it['tc'], it['channel'], user)
x.TIMER_EXPR = lambda tc: 'At91Sam3xClockTC{}'.format(tc)
x.TIMER_ISR = lambda my_clock, tc: 'AMBRO_AT91SAM3X_CLOCK_TC{}_GLOBAL({}, Context())'.format(tc, my_clock)
def At91Sam3uClockDef(x):
x.INCLUDE = 'hal/at91/At91Sam3uClock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('At91Sam3uClockService', [config.get_int_constant('prescaler')])
x.TIMER_RE = '\\ATC([0-9])\\Z'
x.CHANNEL_RE = '\\ATC([0-9])([A-C])\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'At91Sam3uClockInterruptTimerService<At91Sam3uClockTC{}, At91Sam3uClockComp{}, {}>'.format(it['tc'], it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: 'AMBRO_AT91SAM3U_CLOCK_INTERRUPT_TIMER_GLOBAL(At91Sam3uClockTC{}, At91Sam3uClockComp{}, {}, Context())'.format(it['tc'], it['channel'], user)
x.TIMER_EXPR = lambda tc: 'At91Sam3uClockTC{}'.format(tc)
x.TIMER_ISR = lambda my_clock, tc: 'AMBRO_AT91SAM3U_CLOCK_TC{}_GLOBAL({}, Context())'.format(tc, my_clock)
def Mk20ClockDef(x):
x.INCLUDE = 'hal/teensy3/Mk20Clock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('Mk20ClockService', [config.get_int_constant('prescaler')])
x.TIMER_RE = '\\AFTM([0-9])\\Z'
x.CHANNEL_RE = '\\AFTM([0-9])_([0-9])\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'Mk20ClockInterruptTimerService<Mk20ClockFTM{}, {}, {}>'.format(it['tc'], it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: 'AMBRO_MK20_CLOCK_INTERRUPT_TIMER_GLOBAL(Mk20ClockFTM{}, {}, {}, Context())'.format(it['tc'], it['channel'], user)
x.TIMER_EXPR = lambda tc: 'Mk20ClockFtmSpec<Mk20ClockFTM{}>'.format(tc)
x.TIMER_ISR = lambda my_clock, tc: 'AMBRO_MK20_CLOCK_FTM_GLOBAL({}, {}, Context())'.format(tc, my_clock)
def AvrClockDef(x):
x.INCLUDE = 'hal/avr/AvrClock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('AvrClockService', [config.get_int_constant('PrescaleDivide')])
x.TIMER_RE = '\\ATC([0-9])\\Z'
x.CHANNEL_RE = '\\ATC([0-9])_([A-Z])\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'AvrClockInterruptTimerService<AvrClockTcChannel{}{}, {}>'.format(it['tc'], it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: 'AMBRO_AVR_CLOCK_INTERRUPT_TIMER_ISRS({}, {}, {}, Context())'.format(it['tc'], it['channel'], user)
x.TIMER_EXPR = lambda tc: 'AvrClockTcSpec<AvrClockTc{}>'.format(tc)
x.CLOCK_ISR = lambda my_clock, clock: 'AMBRO_AVR_CLOCK_ISRS({}, {}, Context())'.format(clock['primary_timer'], my_clock)
@assign_func.assign_func(x, 'handle_timer')
def handle_timer(gen, timer_id, timer_config):
mode_sel = selection.Selection()
@mode_sel.option('AvrClockTcModeClock')
def option(mode):
return 'AvrClockTcModeClock'
@mode_sel.option('AvrClockTcMode8BitPwm')
def option(mode):
return TemplateExpr('AvrClockTcMode8BitPwm', [
mode.get_int('PrescaleDivide'),
])
@mode_sel.option('AvrClockTcMode16BitPwm')
def option(mode):
return TemplateExpr('AvrClockTcMode16BitPwm', [
mode.get_int('PrescaleDivide'),
mode.get_int('TopVal'),
])
return TemplateExpr('AvrClockTcSpec', [
'AvrClockTc{}'.format(timer_id),
timer_config.do_selection('Mode', mode_sel),
])
def Stm32f4ClockDef(x):
x.INCLUDE = 'hal/stm32/Stm32f4Clock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('Stm32f4ClockService', [config.get_int_constant('prescaler')])
x.TIMER_RE = '\\ATIM([0-9]{1,2})\\Z'
x.CHANNEL_RE = '\\ATIM([0-9]{1,2})_([1-4])\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'Stm32f4ClockInterruptTimerService<Stm32f4ClockTIM{}, Stm32f4ClockComp{}, {}>'.format(it['tc'], it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: 'AMBRO_STM32F4_CLOCK_INTERRUPT_TIMER_GLOBAL(Stm32f4ClockTIM{}, Stm32f4ClockComp{}, {}, Context())'.format(it['tc'], it['channel'], user)
x.TIMER_EXPR = lambda tc: 'Stm32f4ClockTIM{}'.format(tc)
x.TIMER_ISR = lambda my_clock, tc: 'AMBRO_STM32F4_CLOCK_TC_GLOBAL({}, {}, Context())'.format(tc, my_clock)
def LinuxClockDef(x):
x.INCLUDE = 'hal/linux/LinuxClock.h'
x.CLOCK_SERVICE = lambda config: TemplateExpr('LinuxClockService', [config.get_int_constant('SubSecondBits'), config.get_int_constant('MaxTimers')])
x.TIMER_RE = '\\A()\\Z'
x.CHANNEL_RE = '\\A()([0-9]{1,2})\\Z'
x.INTERRUPT_TIMER_EXPR = lambda it, clearance: 'LinuxClockInterruptTimerService<{}, {}>'.format(it['channel'], clearance)
x.INTERRUPT_TIMER_ISR = lambda it, user: ''
x.TIMER_EXPR = lambda tc: 'void'
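# Selects the clock implementation named in the configuration and registers the
# resulting clock object as a singleton under clock_name, so that helpers such
# as use_interrupt_timer() can look it up later.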
def setup_clock(gen, config, key, clock_name, priority, allow_disabled):
clock_sel = selection.Selection()
@clock_sel.option('NoClock')
def option(clock):
if not allow_disabled:
clock.path().error('A clock is required.')
return None
@clock_sel.option('At91Sam3xClock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, At91Sam3xClockDef)
@clock_sel.option('At91Sam3uClock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, At91Sam3uClockDef)
@clock_sel.option('Mk20Clock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, Mk20ClockDef)
@clock_sel.option('AvrClock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, AvrClockDef)
@clock_sel.option('Stm32f4Clock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, Stm32f4ClockDef)
@clock_sel.option('LinuxClock')
def option(clock):
return CommonClock(gen, clock, clock_name, priority, LinuxClockDef)
clock_object = config.do_selection(key, clock_sel)
if clock_object is not None:
gen.register_singleton_object(clock_name, clock_object)
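# Optionally adds a millisecond-resolution clock as a global resource; besides
# 'none', only the ARM SysTick based implementation is available.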
def setup_millisecond_clock(gen, config, key, priority):
clock_sel = selection.Selection()
@clock_sel.option('NoMillisecondClock')
def option(clock):
return None
@clock_sel.option('ArmSysTickMillisecondClock')
def option(clock):
gen.add_aprinter_include('hal/generic/ArmSysTickMillisecondClock.h')
gen.add_isr('APRINTER_ARM_SYSTICK_MILLISECOND_CLOCK_GLOBAL(MyMillisecondClock, Context())')
gen.set_have_hw_millisecond_clock()
return TemplateExpr('ArmSysTickMillisecondClock', ['Context', 'Program'])
clock_expr = config.do_selection(key, clock_sel)
if clock_expr is not None:
gen.add_global_resource(priority, 'MyMillisecondClock', clock_expr, context_name='MillisecondClock')
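# Selects the platform pins service, emits the Pins global resource, and
# registers the pin-name regexes that get_pin() validates against.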
def setup_pins (gen, config, key):
pin_regexes = [IDENTIFIER_REGEX]
pins_sel = selection.Selection()
@pins_sel.option('At91SamPins')
def options(pin_config):
gen.add_aprinter_include('hal/at91/At91SamPins.h')
pin_regexes.append('\\AAt91SamPin<At91SamPio[A-Z],[0-9]{1,3}>\\Z')
return TemplateLiteral('At91SamPinsService')
@pins_sel.option('Mk20Pins')
def options(pin_config):
gen.add_aprinter_include('hal/teensy3/Mk20Pins.h')
pin_regexes.append('\\AMk20Pin<Mk20Port[A-Z],[0-9]{1,3}>\\Z')
return TemplateLiteral('Mk20PinsService')
@pins_sel.option('AvrPins')
def options(pin_config):
gen.add_aprinter_include('hal/avr/AvrPins.h')
pin_regexes.append('\\AAvrPin<AvrPort[A-Z],[0-9]{1,3}>\\Z')
return TemplateLiteral('AvrPinsService')
@pins_sel.option('Stm32f4Pins')
def options(pin_config):
gen.add_aprinter_include('hal/stm32/Stm32f4Pins.h')
pin_regexes.append('\\AStm32f4Pin<Stm32f4Port[A-Z],[0-9]{1,3}>\\Z')
return TemplateLiteral('Stm32f4PinsService')
@pins_sel.option('StubPins')
def options(pin_config):
gen.add_aprinter_include('hal/generic/StubPins.h')
pin_regexes.append('\\AStubPin\\Z')
return TemplateLiteral('StubPinsService')
service_expr = config.do_selection(key, pins_sel)
service_code = 'using PinsService = {};'.format(service_expr.build(indent=0))
pins_expr = TemplateExpr('PinsService::Pins', ['Context', 'Program'])
gen.add_global_resource(10, 'Pins', pins_expr, use_instance=True, code_before=service_code, context_name='Pins')
gen.register_singleton_object('pin_regexes', pin_regexes)
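# Reads a pin name from the configuration (e.g. 'At91SamPin<At91SamPioA,14>'),
# validates it against the registered pin regexes, and returns it unchanged.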
def get_pin (gen, config, key):
pin = config.get_string(key)
pin_regexes = gen.get_singleton_object('pin_regexes')
if not any(re.match(pin_regex, pin) for pin_regex in pin_regexes):
config.key_path(key).error('Invalid pin value.')
return pin
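# Returns the service expression for the selected watchdog driver, adding the
# required include and, where applicable, the ISR macro.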
def setup_watchdog (gen, config, key, user):
watchdog_sel = selection.Selection()
@watchdog_sel.option('At91SamWatchdog')
def option(watchdog):
gen.add_aprinter_include('hal/at91/At91SamWatchdog.h')
return TemplateExpr('At91SamWatchdogService', [
watchdog.get_int('Wdv')
])
@watchdog_sel.option('Mk20Watchdog')
def option(watchdog):
gen.add_aprinter_include('hal/teensy3/Mk20Watchdog.h')
gen.add_isr('AMBRO_MK20_WATCHDOG_GLOBAL({})'.format(user))
return TemplateExpr('Mk20WatchdogService', [
watchdog.get_int('Toval'),
watchdog.get_int('Prescval'),
])
@watchdog_sel.option('AvrWatchdog')
def option(watchdog):
wdto = watchdog.get_string('Timeout')
if not re.match('\\AWDTO_[0-9A-Z]{1,10}\\Z', wdto):
watchdog.key_path('Timeout').error('Incorrect value.')
gen.add_aprinter_include('hal/avr/AvrWatchdog.h')
gen.add_isr('AMBRO_AVR_WATCHDOG_GLOBAL')
return TemplateExpr('AvrWatchdogService', [wdto])
@watchdog_sel.option('Stm32f4Watchdog')
def option(watchdog):
gen.add_aprinter_include('hal/stm32/Stm32f4Watchdog.h')
return TemplateExpr('Stm32f4WatchdogService', [
watchdog.get_int('Divider'),
watchdog.get_int('Reload'),
])
@watchdog_sel.option('NullWatchdog')
def option(watchdog):
gen.add_aprinter_include('hal/generic/NullWatchdog.h')
return 'NullWatchdogService'
return config.do_selection(key, watchdog_sel)
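# Selects the ADC driver; the MyAdc global resource itself is emitted in a
# finalize action so that analog-input pins registered later through the
# 'adc_pins' singleton are included in the pin list.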
def setup_adc (gen, config, key):
adc_sel = selection.Selection()
@adc_sel.option('NoAdc')
def option(adc_config):
return None
@adc_sel.option('At91SamAdc')
def option(adc_config):
gen.add_aprinter_include('hal/at91/At91SamAdc.h')
gen.add_float_constant('AdcFreq', adc_config.get_float('freq'))
gen.add_float_constant('AdcAvgInterval', adc_config.get_float('avg_interval'))
gen.add_int_constant('uint16', 'AdcSmoothing', max(0, min(65535, int(adc_config.get_float('smoothing') * 65536.0))))
gen.add_isr('AMBRO_AT91SAM_ADC_GLOBAL(MyAdc, Context())')
return {
'service_expr': TemplateExpr('At91SamAdcService', [
'AdcFreq',
adc_config.get_int('startup'),
adc_config.get_int('settling'),
adc_config.get_int('tracking'),
adc_config.get_int('transfer'),
'At91SamAdcAvgParams<AdcAvgInterval>',
]),
'pin_func': lambda pin: 'At91SamAdcSmoothPin<{}, AdcSmoothing>'.format(pin)
}
@adc_sel.option('At91Sam3uAdc')
def option(adc_config):
gen.add_aprinter_include('hal/at91/At91SamAdc.h')
gen.add_float_constant('AdcFreq', adc_config.get_float('freq'))
gen.add_float_constant('AdcAvgInterval', adc_config.get_float('avg_interval'))
gen.add_int_constant('uint16', 'AdcSmoothing', max(0, min(65535, int(adc_config.get_float('smoothing') * 65536.0))))
gen.add_isr('AMBRO_AT91SAM3U_ADC_GLOBAL(MyAdc, Context())')
return {
'service_expr': TemplateExpr('At91Sam3uAdcService', [
'AdcFreq',
adc_config.get_int('startup'),
adc_config.get_int('shtim'),
'At91SamAdcAvgParams<AdcAvgInterval>',
]),
'pin_func': lambda pin: 'At91SamAdcSmoothPin<{}, AdcSmoothing>'.format(pin)
}
@adc_sel.option('Mk20Adc')
def option(adc_config):
gen.add_aprinter_include('hal/teensy3/Mk20Adc.h')
gen.add_int_constant('int32', 'AdcADiv', adc_config.get_int('AdcADiv'))
gen.add_isr('AMBRO_MK20_ADC_ISRS(MyAdc, Context())')
return {
'service_expr': TemplateExpr('Mk20AdcService', ['AdcADiv']),
'pin_func': lambda pin: pin
}
@adc_sel.option('AvrAdc')
def option(adc_config):
gen.add_aprinter_include('hal/avr/AvrAdc.h')
gen.add_int_constant('int32', 'AdcRefSel', adc_config.get_int('RefSel'))
gen.add_int_constant('int32', 'AdcPrescaler', adc_config.get_int('Prescaler'))
gen.add_int_constant('int32', 'AdcOverSamplingBits', adc_config.get_int('OverSamplingBits'))
gen.add_isr('AMBRO_AVR_ADC_ISRS(MyAdc, Context())')
return {
'service_expr': TemplateExpr('AvrAdcService', ['AdcRefSel', 'AdcPrescaler', 'AdcOverSamplingBits']),
'pin_func': lambda pin: pin
}
@adc_sel.option('Stm32f4Adc')
def option(adc_config):
gen.add_aprinter_include('hal/stm32/Stm32f4Adc.h')
gen.add_isr('APRINTER_STM32F4_ADC_GLOBAL(MyAdc, Context())')
return {
'service_expr': TemplateExpr('Stm32f4AdcService', [
adc_config.get_int('ClockDivider'),
adc_config.get_int('SampleTimeSelection'),
]),
'pin_func': lambda pin: pin
}
result = config.do_selection(key, adc_sel)
if result is None:
return
gen.register_singleton_object('adc_pins', [])
def finalize():
adc_pins = gen.get_singleton_object('adc_pins')
pins_expr = TemplateList([result['pin_func'](pin) for pin in adc_pins])
service_code = 'using AdcService = {};'.format(result['service_expr'].build(indent=0))
adc_expr = TemplateExpr('AdcService::Adc', ['Context', 'Program', pins_expr])
gen.add_global_resource(20, 'MyAdc', adc_expr, use_instance=True, code_before=service_code, context_name='Adc')
gen.add_finalize_action(finalize)
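# Optionally sets up a hardware PWM peripheral (currently only At91Sam3xPwm)
# as the MyPwm global resource.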
def setup_pwm(gen, config, key):
pwm_sel = selection.Selection()
@pwm_sel.option('Disabled')
def option(pwm_config):
return None
@pwm_sel.option('At91Sam3xPwm')
def option(pwm_config):
gen.add_aprinter_include('hal/at91/At91Sam3xPwm.h')
return TemplateExpr('At91Sam3xPwm', ['Context', 'Program', TemplateExpr('At91Sam3xPwmParams', [
pwm_config.get_int('PreA'),
pwm_config.get_int('DivA'),
pwm_config.get_int('PreB'),
pwm_config.get_int('DivB'),
])])
pwm_expr = config.do_selection(key, pwm_sel)
if pwm_expr is not None:
gen.add_global_resource(25, 'MyPwm', pwm_expr, context_name='Pwm')
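# Maps the configured pull mode to the platform-specific pin input-mode
# expression used by digital inputs.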
def use_input_mode (config, key):
im_sel = selection.Selection()
@im_sel.option('At91SamPinInputMode')
def option(im_config):
return TemplateExpr('At91SamPinInputMode', [
im_config.do_enum('PullMode', {'Normal': 'At91SamPinPullModeNormal', 'Pull-up': 'At91SamPinPullModePullUp'}),
])
@im_sel.option('Stm32f4PinInputMode')
def option(im_config):
return TemplateExpr('Stm32f4PinInputMode', [
im_config.do_enum('PullMode', {'Normal': 'Stm32f4PinPullModeNone', 'Pull-up': 'Stm32f4PinPullModePullUp', 'Pull-down': 'Stm32f4PinPullModePullDown'}),
])
@im_sel.option('AvrPinInputMode')
def option(im_config):
return im_config.do_enum('PullMode', {'Normal': 'AvrPinInputModeNormal', 'Pull-up': 'AvrPinInputModePullUp'})
@im_sel.option('Mk20PinInputMode')
def option(im_config):
return im_config.do_enum('PullMode', {'Normal': 'Mk20PinInputModeNormal', 'Pull-up': 'Mk20PinInputModePullUp', 'Pull-down': 'Mk20PinInputModePullDown'})
@im_sel.option('StubPinInputMode')
def option(im_config):
return 'StubPinInputMode'
return config.do_selection(key, im_sel)
def use_digital_input (gen, config, key):
di = gen.get_object('digital_input', config, key)
input_mode = use_input_mode(di, 'InputMode')
return '{}, {}'.format(get_pin(gen, di, 'Pin'), _build_template_arg(input_mode, -1))
def use_analog_input (gen, config, key, user):
ai = gen.get_object('analog_input', config, key)
analog_input_sel = selection.Selection()
@analog_input_sel.option('AdcAnalogInput')
def option(analog_input):
gen.add_aprinter_include('printer/analog_input/AdcAnalogInput.h')
pin = get_pin(gen, analog_input, 'Pin')
gen.get_singleton_object('adc_pins').append(pin)
return TemplateExpr('AdcAnalogInputService', [pin])
@analog_input_sel.option('Max31855AnalogInput')
def option(analog_input):
gen.add_aprinter_include('printer/analog_input/Max31855AnalogInput.h')
return TemplateExpr('Max31855AnalogInputService', [
analog_input.get_int('Speed_Hz'),
get_pin(gen, analog_input, 'SsPin'),
use_spi(gen, analog_input, 'SpiService', '{}::GetSpi'.format(user)),
])
return ai.do_selection('Driver', analog_input_sel)
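# Reserves an output-compare unit on the registered clock for an interrupt
# timer and returns its service expression; see add_interrupt_timer() above.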
def use_interrupt_timer (gen, config, key, user, clearance=0.0):
clock = gen.get_singleton_object('Clock')
for it_config in config.enter_config(key):
return clock.add_interrupt_timer(it_config.get_string('oc_unit'), user, clearance, it_config.path())
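# Builds the PWM backend for an output: either a SoftPwm driven by an interrupt
# timer, or a hardware PWM channel. With hard=True only a hardware backend is
# accepted and its driver expression is returned directly.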
def use_pwm_output (gen, config, key, user, username, hard=False):
pwm_output = gen.get_object('pwm_output', config, key)
backend_sel = selection.Selection()
@backend_sel.option('SoftPwm')
def option(backend):
if hard:
config.path().error('Only Hard PWM is allowed here.')
gen.add_aprinter_include('printer/pwm/SoftPwm.h')
return TemplateExpr('SoftPwmService', [
get_pin(gen, backend, 'OutputPin'),
backend.get_bool('OutputInvert'),
gen.add_float_constant('{}PulseInterval'.format(username), backend.get_float('PulseInterval')),
use_interrupt_timer(gen, backend, 'Timer', '{}::TheTimer'.format(user))
])
@backend_sel.option('HardPwm')
def option(backend):
gen.add_aprinter_include('printer/pwm/HardPwm.h')
hard_driver_sel = selection.Selection()
@hard_driver_sel.option('AvrClockPwm')
def option(hard_driver):
clock = gen.get_singleton_object('Clock')
oc_unit = clock.check_oc_unit(hard_driver.get_string('oc_unit'), hard_driver.path())
return TemplateExpr('AvrClockPwmService', [
'AvrClockTcChannel{}{}'.format(oc_unit['tc'], oc_unit['channel']),
get_pin(gen, hard_driver, 'OutputPin'),
])
@hard_driver_sel.option('At91Sam3xPwmChannel')
def option(hard_driver):
return TemplateExpr('At91Sam3xPwmChannelService', [
hard_driver.get_int('ChannelPrescaler'),
hard_driver.get_int('ChannelPeriod'),
hard_driver.get_int('ChannelNumber'),
get_pin(gen, hard_driver, 'OutputPin'),
TemplateChar(hard_driver.get_identifier('Signal')),
hard_driver.get_bool('Invert'),
])
hard_pwm_expr = backend.do_selection('HardPwmDriver', hard_driver_sel)
if hard:
return hard_pwm_expr
return TemplateExpr('HardPwmService', [
hard_pwm_expr
])
return pwm_output.do_selection('Backend', backend_sel)
def use_spi_ll (gen, config, key, user):
sel = selection.Selection()
@sel.option('At91SamSpiSpiLL')
def option(config):
gen.add_aprinter_include('hal/at91/At91SamSpiSpiLL.h')
devices = ['SPI0', 'SPI']
dev = config.get_identifier('Device')
if dev not in devices:
config.key_path('Device').error('Incorrect SPI device.')
gen.add_isr('APRINTER_AT91SAM_SPI_SPI_LL_GLOBAL({}, {}, Context())'.format(dev, user))
return TemplateExpr('At91SamSpiSpiLL', [
'At91SamSpiSpiLLDevice{}'.format(dev)
])
@sel.option('At91SamUsartSpiLL')
def option(config):
gen.add_aprinter_include('hal/at91/At91SamUsartSpiLL.h')
dev_index = config.get_int('DeviceIndex')
gen.add_isr('APRINTER_AT91SAM_USART_SPI_LL_GLOBAL({}, {}, Context())'.format(dev_index, user))
return TemplateExpr('At91SamUsartSpiLL', [
'At91SamUsartSpiLLDeviceUSART{}'.format(dev_index),
])
return config.do_selection(key, sel)
def use_spi (gen, config, key, user):
sel = selection.Selection()
@sel.option('GenericSpi')
def option(config):
gen.add_aprinter_include('hal/generic/GenericSpi.h')
return TemplateExpr('GenericSpi', [
use_spi_ll(gen, config, 'LLDriver', '{}::GetLLDriver'.format(user)),
])
@sel.option('AvrSpi')
def option(config):
gen.add_aprinter_include('hal/avr/AvrSpi.h')
gen.add_isr('AMBRO_AVR_SPI_ISRS({}, Context())'.format(user))
return 'AvrSpiService'
return config.do_selection(key, sel)
def use_sdio (gen, config, key, user):
sdio_sel = selection.Selection()
@sdio_sel.option('Stm32f4Sdio')
def option(sdio_config):
hal_dir = 'Drivers/STM32F4xx_HAL_Driver'
gen.add_extra_source('stm32cubef4', hal_dir+'/Src/stm32f4xx_hal_sd.c')
gen.add_extra_source('stm32cubef4', hal_dir+'/Src/stm32f4xx_ll_sdmmc.c')
gen.add_aprinter_include('hal/stm32/Stm32f4Sdio.h')
gen.add_isr('APRINTER_STM32F4_SDIO_GLOBAL({}, Context())'.format(user))
return TemplateExpr('Stm32f4SdioService', [
sdio_config.get_bool('IsWideMode'),
sdio_config.get_int('DataTimeoutBusClocks'),
sdio_config.get_int('SdClockPrescaler'),
])
@sdio_sel.option('At91SamSdio')
def option(sdio_config):
gen.add_aprinter_include('hal/at91/At91SamSdio.h')
return TemplateExpr('At91SamSdioService', [
sdio_config.get_int('Slot'),
sdio_config.get_bool('IsWideMode'),
sdio_config.get_int('MaxIoDescriptors'),
])
return config.do_selection(key, sdio_sel)
def use_i2c (gen, config, key, user, username):
i2c_sel = selection.Selection()
@i2c_sel.option('At91SamI2c')
def option(i2c_config):
gen.add_aprinter_include('hal/at91/At91SamI2c.h')
devices = {
'At91SamI2cDevice1':1,
}
dev = i2c_config.get_identifier('Device')
if dev not in devices:
i2c_config.path().error('Incorrect I2C device.')
gen.add_isr('AMBRO_AT91SAM_I2C_GLOBAL({}, {}, Context())'.format(devices[dev], user))
return 'At91SamI2cService<{}, {}, {}>'.format(
dev,
i2c_config.get_int('Ckdiv'),
gen.add_float_constant('{}I2cFreq'.format(username), i2c_config.get_float('I2cFreq'))
)
return config.do_selection(key, i2c_sel)
def use_eeprom(gen, config, key, user):
eeprom_sel = selection.Selection()
@eeprom_sel.option('I2cEeprom')
def option(eeprom):
gen.add_aprinter_include('hal/generic/I2cEeprom.h')
return TemplateExpr('I2cEepromService', [use_i2c(gen, eeprom, 'I2c', '{}::GetI2c'.format(user), 'ConfigEeprom'), eeprom.get_int('I2cAddr'), eeprom.get_int('Size'), eeprom.get_int('BlockSize'), gen.add_float_constant('ConfigEepromWriteTimeout', eeprom.get_float('WriteTimeout'))])
@eeprom_sel.option('TeensyEeprom')
def option(eeprom):
gen.add_aprinter_include('hal/teensy3/TeensyEeprom.h')
return TemplateExpr('TeensyEepromService', [eeprom.get_int('Size'), eeprom.get_int('FakeBlockSize')])
@eeprom_sel.option('AvrEeprom')
def option(eeprom):
gen.add_aprinter_include('hal/avr/AvrEeprom.h')
gen.add_isr('AMBRO_AVR_EEPROM_ISRS({}, Context())'.format(user))
return TemplateExpr('AvrEepromService', [eeprom.get_int('FakeBlockSize')])
@eeprom_sel.option('FlashWrapper')
def option(eeprom):
gen.add_aprinter_include('hal/generic/FlashWrapper.h')
return TemplateExpr('FlashWrapperService', [
use_flash(gen, eeprom, 'FlashDriver', '{}::GetFlash'.format(user)),
])
return config.do_selection(key, eeprom_sel)
def use_flash(gen, config, key, user):
flash_sel = selection.Selection()
@flash_sel.option('At91SamFlash')
def option(flash):
device_index = flash.get_int('DeviceIndex')
if not (0 <= device_index < 10):
flash.key_path('DeviceIndex').error('Invalid device index.')
gen.add_aprinter_include('hal/at91/At91Sam3xFlash.h')
gen.add_isr('AMBRO_AT91SAM3X_FLASH_GLOBAL({}, {}, Context())'.format(device_index, user))
return TemplateExpr('At91Sam3xFlashService', [
'At91Sam3xFlashDevice{}'.format(device_index)
])
return config.do_selection(key, flash_sel)
def use_serial(gen, config, key, user):
serial_sel = selection.Selection()
@serial_sel.option('AsfUsbSerial')
def option(serial_service):
usb_device = gen.get_singleton_object('asf_usb_device')
gen.add_extra_source('asf', 'sam/drivers/{0:}/{0:}_device.c'.format(usb_device))
gen.add_extra_source('asf', 'common/services/usb/udc/udc.c')
gen.add_extra_source('asf', 'common/services/usb/class/cdc/device/udi_cdc.c')
gen.add_extra_source('asf', 'common/services/usb/class/cdc/device/udi_cdc_desc.c')
gen.add_aprinter_include('hal/at91/AsfUsbSerial.h')
gen.add_init_call(0, 'udc_start();')
return 'AsfUsbSerialService'
@serial_sel.option('At91Sam3xSerial')
def option(serial_service):
gen.add_aprinter_include('hal/at91/At91Sam3xSerial.h')
gen.add_isr('AMBRO_AT91SAM3X_SERIAL_GLOBAL({}, Context())'.format(user))
if serial_service.get_bool('UseForDebug'):
gen.add_aprinter_include('hal/generic/NewlibDebugWrite.h')
gen.add_global_code(0, 'APRINTER_SETUP_NEWLIB_DEBUG_WRITE(At91Sam3xSerial_DebugWrite<{}>, Context())'.format(user))
return 'At91Sam3xSerialService'
@serial_sel.option('TeensyUsbSerial')
def option(serial_service):
gen.add_aprinter_include('hal/teensy3/TeensyUsbSerial.h')
gen.add_global_code(0, 'extern "C" { void usb_init (void); }')
gen.add_init_call(0, 'usb_init();')
return 'TeensyUsbSerialService'
@serial_sel.option('AvrSerial')
def option(serial_service):
gen.add_aprinter_include('hal/avr/AvrSerial.h')
gen.add_aprinter_include('hal/avr/AvrDebugWrite.h')
gen.add_isr('AMBRO_AVR_SERIAL_ISRS({}, Context())'.format(user))
gen.add_global_code(0, 'APRINTER_SETUP_AVR_DEBUG_WRITE(AvrSerial_DebugPutChar<{}>, Context())'.format(user))
gen.add_init_call(-2, 'aprinter_init_avr_debug_write();')
return TemplateExpr('AvrSerialService', [serial_service.get_bool('DoubleSpeed')])
@serial_sel.option('Stm32f4UsbSerial')
def option(serial_service):
hal_dir = 'Drivers/STM32F4xx_HAL_Driver'
usb_dir = 'Middlewares/ST/STM32_USB_Device_Library'
gen.add_extra_source('stm32cubef4', hal_dir+'/Src/stm32f4xx_hal_pcd.c')
gen.add_extra_source('stm32cubef4', hal_dir+'/Src/stm32f4xx_hal_pcd_ex.c')
gen.add_extra_source('stm32cubef4', hal_dir+'/Src/stm32f4xx_ll_usb.c')
gen.add_extra_source('stm32cubef4', usb_dir+'/Core/Src/usbd_core.c')
gen.add_extra_source('stm32cubef4', usb_dir+'/Core/Src/usbd_ctlreq.c')
gen.add_extra_source('stm32cubef4', usb_dir+'/Core/Src/usbd_ioreq.c')
gen.add_extra_source('stm32cubef4', usb_dir+'/Class/CDC/Src/usbd_cdc.c')
gen.add_extra_source('aprinter', 'aprinter/platform/stm32f4/usbd_conf.c')
gen.add_extra_source('aprinter', 'aprinter/platform/stm32f4/usbd_desc.c')
gen.add_define('APRINTER_ENABLE_USB', '')
usb_mode = gen.get_singleton_object('stm32f4_usb_mode')
if usb_mode == 'None':
serial_service.path().error('USB mode in platform configuration must not be None.')
elif usb_mode == 'FS':
gen.add_define('USE_USB_FS', '')
elif usb_mode == 'HS':
gen.add_define('USE_USB_HS', '')
elif usb_mode == 'HS-in-FS':
gen.add_define('USE_USB_HS', '')
gen.add_define('USE_USB_HS_IN_FS', '')
else:
assert False
gen.add_aprinter_include('hal/stm32/Stm32f4UsbSerial.h')
return 'Stm32f4UsbSerialService'
@serial_sel.option('LinuxStdInOutSerial')
def option(serial_service):
gen.add_aprinter_include('hal/linux/LinuxStdInOutSerial.h')
return 'LinuxStdInOutSerialService'
@serial_sel.option('NullSerial')
def option(serial_service):
gen.add_aprinter_include('hal/generic/NullSerial.h')
return 'NullSerialService'
return config.do_selection(key, serial_sel)
def use_sdcard(gen, config, key, user):
sd_service_sel = selection.Selection()
@sd_service_sel.option('SpiSdCard')
def option(spi_sd):
gen.add_aprinter_include('hal/generic/SpiSdCard.h')
return TemplateExpr('SpiSdCardService', [
spi_sd.get_int('Speed_Hz'),
get_pin(gen, spi_sd, 'SsPin'),
use_spi(gen, spi_sd, 'SpiService', '{}::GetSpi'.format(user)),
])
@sd_service_sel.option('SdioSdCard')
def option(sdio_sd):
gen.add_aprinter_include('hal/generic/SdioSdCard.h')
return TemplateExpr('SdioSdCardService', [
use_sdio(gen, sdio_sd, 'SdioService', '{}::GetSdio'.format(user)),
])
@sd_service_sel.option('LinuxSdCard')
def option(linux_sd):
gen.add_aprinter_include('hal/linux/LinuxSdCard.h')
return TemplateExpr('LinuxSdCardService', [
linux_sd.get_int('BlockSize'),
linux_sd.get_int('MaxIoBlocks'),
linux_sd.get_int('MaxIoDescriptors'),
])
return config.do_selection(key, sd_service_sel)
def use_config_manager(gen, config, key, user):
config_manager_sel = selection.Selection()
@config_manager_sel.option('ConstantConfigManager')
def option(config_manager):
gen.add_aprinter_include('printer/config_manager/ConstantConfigManager.h')
return 'ConstantConfigManagerService'
@config_manager_sel.option('RuntimeConfigManager')
def option(config_manager):
gen.add_aprinter_include('printer/config_manager/RuntimeConfigManager.h')
config_store_sel = selection.Selection()
@config_store_sel.option('NoStore')
def option(config_store):
return 'RuntimeConfigManagerNoStoreService'
@config_store_sel.option('EepromConfigStore')
def option(config_store):
gen.add_aprinter_include('printer/config_store/EepromConfigStore.h')
return TemplateExpr('EepromConfigStoreService', [
use_eeprom(gen, config_store, 'Eeprom', '{}::GetStore<>::GetEeprom'.format(user)),
config_store.get_int('StartBlock'),
config_store.get_int('EndBlock'),
])
@config_store_sel.option('FileConfigStore')
def option(config_store):
gen.add_aprinter_include('printer/config_store/FileConfigStore.h')
return 'FileConfigStoreService'
return TemplateExpr('RuntimeConfigManagerService', [config_manager.do_selection('ConfigStore', config_store_sel)])
return config.do_selection(key, config_manager_sel)
def use_microstep(gen, config, key):
ms_driver_sel = selection.Selection()
@ms_driver_sel.option('A4982')
def option(ms_driver_config):
gen.add_aprinter_include('printer/microstep/A4982MicroStep.h')
return 'A4982MicroStepService<{}, {}>'.format(
get_pin(gen, ms_driver_config, 'Ms1Pin'),
get_pin(gen, ms_driver_config, 'Ms2Pin'),
)
@ms_driver_sel.option('A4988')
def option(ms_driver_config):
gen.add_aprinter_include('printer/microstep/A4988MicroStep.h')
return 'A4988MicroStepService<{}, {}, {}>'.format(
get_pin(gen, ms_driver_config, 'Ms1Pin'),
get_pin(gen, ms_driver_config, 'Ms2Pin'),
get_pin(gen, ms_driver_config, 'Ms3Pin'),
)
return config.do_selection(key, ms_driver_sel)
def use_current_driver(gen, config, key, user):
current_driver_sel = selection.Selection()
@current_driver_sel.option('Ad5206Current')
def option(current_driver):
gen.add_aprinter_include('printer/current/Ad5206Current.h')
return TemplateExpr('Ad5206CurrentService', [
current_driver.get_int('Speed_Hz'),
get_pin(gen, current_driver, 'SsPin'),
use_spi(gen, current_driver, 'SpiService', '{}::GetSpi'.format(user)),
])
return config.do_selection(key, current_driver_sel)
def use_current_driver_channel(gen, config, key, name):
current_driver_channel_sel = selection.Selection()
@current_driver_channel_sel.option('Ad5206CurrentChannelParams')
def option(current_driver_channel):
return TemplateExpr('Ad5206CurrentChannelParams', [
current_driver_channel.get_int('DevChannelIndex'),
gen.add_float_config('{}CurrentConversionFactor'.format(name), current_driver_channel.get_float('ConversionFactor'))
])
return config.do_selection(key, current_driver_channel_sel)
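# Reads a heater or fan number, checks that it is within 0-255 and not already
# taken, and records it in existing_numbers.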
def get_heater_fan_number(config, key, existing_numbers):
number = config.get_int(key)
if not 0 <= number <= 255:
config.key_path(key).error('Value out of range.')
if number in existing_numbers:
config.key_path(key).error('Conflicting heater or fan number.')
existing_numbers.add(number)
return number
def get_ip_index(gen, config, key):
index_name = config.get_string(key)
if index_name not in ('MruListIndex', 'AvlTreeIndex'):
config.key_path(key).error('Invalid value.')
gen.add_include('aipstack/structure/index/{}.h'.format(index_name))
return 'AIpStack::{}Service'.format(index_name)
def get_heap_structure(gen, config, key):
structure_name = config.get_string(key)
if structure_name not in ('LinkedHeap', 'SortedList'):
config.key_path(key).error('Invalid value.')
gen.add_aprinter_include('structure/{}.h'.format(structure_name))
return 'APrinter::{}Service'.format(structure_name)
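# State shared between setup_network() and the modules that open TCP
# connections; the accumulated connection count is used in a finalize action
# to size the path MTU cache (IpStackNumMtuEntries).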
class NetworkConfigState(object):
def __init__(self, min_send_buf, min_recv_buf):
self.min_send_buf = min_send_buf
self.min_recv_buf = min_recv_buf
self._num_connections = 0
def add_resource_counts(self, connections=0):
self._num_connections += connections
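# When networking is enabled, validates the network parameters, emits the
# IpStackNetworkService global resource and registers the 'network' singleton.
# Returns True for the Network option and False for NoNetwork.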
def setup_network(gen, config, key, assertions_enabled):
network_sel = selection.Selection()
@network_sel.option('NoNetwork')
def option(network_config):
return False
@network_sel.option('Network')
def option(network_config):
gen.add_aprinter_include('net/IpStackNetwork.h')
gen.add_include('aipstack/ip/IpReassembly.h')
gen.add_include('aipstack/ip/IpPathMtuCache.h')
num_arp_entries = network_config.get_int('NumArpEntries')
if not 4 <= num_arp_entries <= 128:
network_config.key_path('NumArpEntries').error('Value out of range.')
arp_protect_count = network_config.get_int('ArpProtectCount')
if not 2 <= arp_protect_count <= num_arp_entries:
network_config.key_path('ArpProtectCount').error('Value out of range.')
max_reass_packets = network_config.get_int('MaxReassPackets')
if not 1 <= max_reass_packets <= 128:
network_config.key_path('MaxReassPackets').error('Value out of range.')
max_reass_size = network_config.get_int('MaxReassSize')
if not 1480 <= max_reass_size <= 30000:
network_config.key_path('MaxReassSize').error('Value out of range.')
max_reass_holes = network_config.get_int('MaxReassHoles')
if not 1 <= max_reass_holes <= 250:
network_config.key_path('MaxReassHoles').error('Value out of range.')
max_reass_time_sec = network_config.get_int('MaxReassTimeSeconds')
if not 5 <= max_reass_time_sec <= 255:
network_config.key_path('MaxReassTimeSeconds').error('Value out of range.')
mtu_timeout_minutes = network_config.get_int('MtuTimeoutMinutes')
if not 1 <= mtu_timeout_minutes <= 255:
network_config.key_path('MtuTimeoutMinutes').error('Value out of range.')
num_tcp_pcbs = network_config.get_int('NumTcpPcbs')
if not 2 <= num_tcp_pcbs <= 2048:
network_config.key_path('NumTcpPcbs').error('Value out of range.')
num_oos_segs = network_config.get_int('NumOosSegs')
if not 1 <= num_oos_segs <= 32:
network_config.key_path('NumOosSegs').error('Value out of range.')
tcp_wnd_upd_thr_div = network_config.get_int('TcpWndUpdThrDiv')
link_with_array_indices = network_config.get_bool('LinkWithArrayIndices')
checksum_src_file = gen.get_singleton_object('checksum_src_file', allow_none=True)
if checksum_src_file is not None:
gen.add_extra_source(*checksum_src_file)
gen.add_define('AIPSTACK_EXTERNAL_CHKSUM', 1)
gen.add_define('AIPSTACK_CONFIG_ASSERT_INCLUDE', '<aprinter/base/Assert.h>')
gen.add_define('AIPSTACK_CONFIG_ASSERT_HANDLER', 'APRINTER_AIPSTACK_ASSERT_HANDLER')
if assertions_enabled:
gen.add_define('AIPSTACK_CONFIG_ENABLE_ASSERTIONS')
service_expr = TemplateExpr('IpStackNetworkService', [
use_ethernet(gen, network_config, 'EthernetDriver', 'MyNetwork::GetEthernet'),
num_arp_entries,
arp_protect_count,
TemplateExpr('AIpStack::IpPathMtuCacheService', [
'AIpStack::IpPathMtuCacheOptions::NumMtuEntries::Is<{}>'.format('IpStackNumMtuEntries'),
'AIpStack::IpPathMtuCacheOptions::MtuTimeoutMinutes::Is<{}>'.format(mtu_timeout_minutes),
'AIpStack::IpPathMtuCacheOptions::MtuIndexService::Is<{}>'.format(
get_ip_index(gen, network_config, 'MtuIndexService')),
]),
TemplateExpr('AIpStack::IpReassemblyService', [
'AIpStack::IpReassemblyOptions::MaxReassEntrys::Is<{}>'.format(max_reass_packets),
'AIpStack::IpReassemblyOptions::MaxReassSize::Is<{}>'.format(max_reass_size),
'AIpStack::IpReassemblyOptions::MaxReassHoles::Is<{}>'.format(max_reass_holes),
'AIpStack::IpReassemblyOptions::MaxReassTimeSeconds::Is<{}>'.format(max_reass_time_sec),
]),
num_tcp_pcbs,
num_oos_segs,
tcp_wnd_upd_thr_div,
get_ip_index(gen, network_config, 'PcbIndexService'),
link_with_array_indices,
get_heap_structure(gen, network_config, 'ArpTableTimersStructureService'),
])
service_code = 'using NetworkService = {};'.format(service_expr.build(indent=0))
network_expr = TemplateExpr('NetworkService::Compose', ['Context', 'Program'])
gen.add_global_resource(27, 'MyNetwork', network_expr, use_instance=True, code_before=service_code, context_name='Network', is_fast_event_root=True)
tcp_max_mss = 1460
min_send_buf = 2*tcp_max_mss
min_recv_buf = 2*tcp_max_mss
network_state = NetworkConfigState(min_send_buf, min_recv_buf)
gen.register_singleton_object('network', network_state)
def finalize():
num_mtu_entries = network_state._num_connections
gen.add_int_constant('int', 'IpStackNumMtuEntries', num_mtu_entries)
gen.add_finalize_action(finalize)
return True
return config.do_selection(key, network_sel)
def use_ethernet(gen, config, key, user):
ethernet_sel = selection.Selection()
@ethernet_sel.option('MiiEthernet')
def option(ethernet_config):
gen.add_aprinter_include('hal/generic/MiiEthernet.h')
return TemplateExpr('MiiEthernetService', [
use_mii(gen, ethernet_config, 'MiiDriver', '{}::GetMii'.format(user)),
use_phy(gen, ethernet_config, 'PhyDriver'),
])
@ethernet_sel.option('LinuxTapEthernet')
def option(ethernet_config):
gen.add_aprinter_include('hal/linux/LinuxTapEthernet.h')
return 'LinuxTapEthernetService'
return config.do_selection(key, ethernet_sel)
def use_mii(gen, config, key, user):
mii_sel = selection.Selection()
@mii_sel.option('At91SamEmacMii')
def option(mii_config):
num_rx_buffers = mii_config.get_int('NumRxBufers')
if not 12 <= num_rx_buffers <= 240:
mii_config.key_path('NumRxBufers').error('Value out of range.')
num_tx_buffers = mii_config.get_int('NumTxBufers')
if not 1 <= num_tx_buffers <= 20:
mii_config.key_path('NumTxBufers').error('Value out of range.')
gen.add_aprinter_include('hal/at91/At91SamEmacMii.h')
gen.add_extra_source('asf', 'sam/drivers/emac/emac.c')
gen.add_isr('APRINTER_AT91SAM_EMAC_MII_GLOBAL({}, Context())'.format(user))
gen.add_define('APRINTER_AT91SAM_EMAC_NUM_RX_BUFFERS', num_rx_buffers)
gen.add_define('APRINTER_AT91SAM_EMAC_NUM_TX_BUFFERS', num_tx_buffers)
return 'At91SamEmacMiiService'
return config.do_selection(key, mii_sel)
def use_phy(gen, config, key):
phy_sel = selection.Selection()
@phy_sel.option('GenericPhy')
def option(phy_config):
gen.add_aprinter_include('hal/ethernet_phy/GenericPhy.h')
return TemplateExpr('GenericPhyService', [
phy_config.get_bool('Rmii'),
phy_config.get_int('PhyAddr'),
])
return config.do_selection(key, phy_sel)
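# Top-level entry point: reads the selected configuration and its board data,
# sets up the platform resources, modules, axes and heaters, and drives code
# generation through a GenState instance.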
def generate(config_root_data, cfg_name, main_template):
gen = GenState()
for config_root in config_reader.start(config_root_data, config_reader_class=GenConfigReader):
if cfg_name is None:
cfg_name = config_root.get_string('selected_config')
for config in config_root.enter_elem_by_id('configurations', 'name', cfg_name):
board_name = config.get_string('board')
aux_control_module = gen.add_module()
aux_control_module_user = 'MyPrinter::GetModule<{}>'.format(aux_control_module.index)
for board_data in config_root.enter_elem_by_id('boards', 'name', board_name):
for platform_config in board_data.enter_config('platform_config'):
output_types = []
for output_types_config in platform_config.enter_config('output_types'):
if output_types_config.get_bool('output_elf'):
output_types.append('elf')
if output_types_config.get_bool('output_bin'):
output_types.append('bin')
if output_types_config.get_bool('output_hex'):
output_types.append('hex')
platformInfo = setup_platform(gen, platform_config, 'platform')
for platform in platform_config.enter_config('platform'):
setup_clock(gen, platform, 'clock', clock_name='Clock', priority=-10, allow_disabled=False)
setup_pins(gen, platform, 'pins')
setup_adc(gen, platform, 'adc')
if platform.has('pwm'):
setup_pwm(gen, platform, 'pwm')
if platform.has('fast_clock'):
setup_clock(gen, platform, 'fast_clock', clock_name='FastClock', priority=-12, allow_disabled=True)
if platform.has('millisecond_clock'):
setup_millisecond_clock(gen, platform, 'millisecond_clock', priority=-13)
setup_debug_interface(gen, platform_config, 'debug_interface')
for helper_name in platform_config.get_list(config_reader.ConfigTypeString(), 'board_helper_includes', max_count=20):
if not re.match('\\A[a-zA-Z0-9_]{1,128}\\Z', helper_name):
platform_config.key_path('board_helper_includes').error('Invalid helper name.')
gen.add_aprinter_include('board/{}.h'.format(helper_name))
gen.register_objects('digital_input', board_data, 'digital_inputs')
gen.register_objects('stepper_port', board_data, 'stepper_ports')
gen.register_objects('analog_input', board_data, 'analog_inputs')
gen.register_objects('pwm_output', board_data, 'pwm_outputs')
gen.register_objects('laser_port', board_data, 'laser_ports')
led_pin_expr = get_pin(gen, board_data, 'LedPin')
for performance in board_data.enter_config('performance'):
gen.add_typedef('TheAxisDriverPrecisionParams', performance.get_identifier('AxisDriverPrecisionParams'))
event_channel_timer_clearance = performance.get_float('EventChannelTimerClearance')
optimize_for_size = performance.get_bool('OptimizeForSize')
optimize_libc_for_size = performance.get_bool('OptimizeLibcForSize')
event_channel_timer_expr = use_interrupt_timer(gen, board_data, 'EventChannelTimer', user='{}::GetEventChannelTimer<>'.format(aux_control_module_user), clearance=event_channel_timer_clearance)
for development in board_data.enter_config('development'):
assertions_enabled = development.get_bool('AssertionsEnabled')
event_loop_benchmark_enabled = development.get_bool('EventLoopBenchmarkEnabled')
detect_overload_enabled = development.get_bool('DetectOverloadEnabled')
watchdog_debug_mode = development.get_bool('WatchdogDebugMode') if development.has('WatchdogDebugMode') else False
build_with_clang = development.get_bool('BuildWithClang')
verbose_build = development.get_bool('VerboseBuild')
debug_symbols = development.get_bool('DebugSymbols')
if assertions_enabled:
gen.add_define('AMBROLIB_ASSERTIONS')
if event_loop_benchmark_enabled:
gen.add_define('EVENTLOOP_BENCHMARK')
if detect_overload_enabled:
gen.add_define('AXISDRIVER_DETECT_OVERLOAD')
if development.get_bool('EnableBulkOutputTest'):
gen.add_aprinter_include('printer/modules/BulkOutputTestModule.h')
bulk_output_test_module = gen.add_module()
bulk_output_test_module.set_expr('BulkOutputTestModuleService')
if development.get_bool('EnableBasicTestModule'):
gen.add_aprinter_include('printer/modules/BasicTestModule.h')
basic_test_module = gen.add_module()
basic_test_module.set_expr('BasicTestModuleService')
elif detect_overload_enabled:
development.key_path('DetectOverloadEnabled').error('BasicTestModule is required for overload detection.')
if development.get_bool('EnableStubCommandModule'):
gen.add_aprinter_include('printer/modules/StubCommandModule.h')
stub_command_module = gen.add_module()
stub_command_module.set_expr('StubCommandModuleService')
for serial in board_data.iter_list_config('serial_ports', max_count=5):
gen.add_aprinter_include('printer/modules/SerialModule.h')
gen.add_aprinter_include('printer/utils/GcodeParser.h')
serial_module = gen.add_module()
serial_user = 'MyPrinter::GetModule<{}>::GetSerial'.format(serial_module.index)
serial_module.set_expr(TemplateExpr('SerialModuleService', [
'UINT32_C({})'.format(serial.get_int_constant('BaudRate')),
serial.get_int_constant('RecvBufferSizeExp'),
serial.get_int_constant('SendBufferSizeExp'),
TemplateExpr('SerialGcodeParserService', [
serial.get_int_constant('GcodeMaxParts'),
]),
use_serial(gen, serial, 'Service', serial_user),
]))
sdcard_sel = selection.Selection()
@sdcard_sel.option('NoSdCard')
def option(sdcard):
pass
@sdcard_sel.option('SdCard')
def option(sdcard):
gen.add_aprinter_include('printer/modules/SdCardModule.h')
sdcard_module = gen.add_module()
sdcard_user = 'MyPrinter::GetModule<{}>::GetInput::GetSdCard'.format(sdcard_module.index)
gcode_parser_sel = selection.Selection()
@gcode_parser_sel.option('TextGcodeParser')
def option(parser):
gen.add_aprinter_include('printer/utils/GcodeParser.h')
return TemplateExpr('FileGcodeParserService', [
parser.get_int('MaxParts'),
])
@gcode_parser_sel.option('BinaryGcodeParser')
def option(parser):
gen.add_aprinter_include('printer/utils/BinaryGcodeParser.h')
return TemplateExpr('BinaryGcodeParserService', [
parser.get_int('MaxParts'),
])
fs_sel = selection.Selection()
@fs_sel.option('Raw')
def option(fs_config):
gen.add_aprinter_include('printer/input/SdRawInput.h')
return TemplateExpr('SdRawInputService', [
use_sdcard(gen, sdcard, 'SdCardService', sdcard_user),
])
@fs_sel.option('Fat32')
def option(fs_config):
max_filename_size = fs_config.get_int('MaxFileNameSize')
if not (12 <= max_filename_size <= 1024):
fs_config.key_path('MaxFileNameSize').error('Bad value.')
num_cache_entries = fs_config.get_int('NumCacheEntries')
if not (1 <= num_cache_entries <= 64):
fs_config.key_path('NumCacheEntries').error('Bad value.')
max_io_blocks = fs_config.get_int('MaxIoBlocks')
if not (1 <= max_io_blocks <= num_cache_entries):
fs_config.key_path('MaxIoBlocks').error('Bad value.')
gen.add_aprinter_include('printer/input/SdFatInput.h')
gen.add_aprinter_include('fs/FatFs.h')
if fs_config.get_bool('EnableFsTest'):
gen.add_aprinter_include('printer/modules/FsTestModule.h')
fs_test_module = gen.add_module()
fs_test_module.set_expr('FsTestModuleService')
gcode_upload_sel = selection.Selection()
@gcode_upload_sel.option('NoGcodeUpload')
def option(gcode_upload_config):
pass
@gcode_upload_sel.option('GcodeUpload')
def option(gcode_upload_config):
gen.add_aprinter_include('printer/modules/GcodeUploadModule.h')
max_command_size = gcode_upload_config.get_int('MaxCommandSize')
if not (32 <= max_command_size <= 1024):
gcode_upload_config.key_path('MaxCommandSize').error('Bad value.')
gcode_upload_module = gen.add_module()
gcode_upload_module.set_expr(TemplateExpr('GcodeUploadModuleService', [
max_command_size,
]))
fs_config.do_selection('GcodeUpload', gcode_upload_sel)
return TemplateExpr('SdFatInputService', [
use_sdcard(gen, sdcard, 'SdCardService', sdcard_user),
TemplateExpr('FatFsService', [
max_filename_size,
num_cache_entries,
1, # NumIoUnits
max_io_blocks,
fs_config.get_bool_constant('CaseInsensFileName'),
fs_config.get_bool_constant('FsWritable'),
fs_config.get_bool_constant('EnableReadHinting'),
]),
fs_config.get_bool_constant('HaveAccessInterface'),
])
sdcard_module.set_expr(TemplateExpr('SdCardModuleService', [
sdcard.do_selection('FsType', fs_sel),
sdcard.do_selection('GcodeParser', gcode_parser_sel),
sdcard.get_int('BufferBaseSize'),
sdcard.get_int('MaxCommandSize'),
]))
board_data.get_config('sdcard_config').do_selection('sdcard', sdcard_sel)
config_manager_expr = use_config_manager(gen, board_data.get_config('runtime_config'), 'config_manager', 'MyPrinter::GetConfigManager')
have_network = setup_network(gen, board_data.get_config('network_config'), 'network', assertions_enabled)
if have_network:
network = gen.get_singleton_object('network')
for network_config in board_data.get_config('network_config').enter_config('network'):
gen.add_aprinter_include('printer/modules/NetworkSupportModule.h')
network_support_module = gen.add_module()
network_support_module.set_expr(TemplateExpr('NetworkSupportModuleService', [
gen.add_bool_config('NetworkEnabled', network_config.get_bool('NetEnabled')),
gen.add_mac_addr_config('NetworkMacAddress', network_config.get_mac_addr('MacAddress')),
gen.add_bool_config('NetworkDhcpEnabled', network_config.get_bool('DhcpEnabled')),
gen.add_ip_addr_config('NetworkIpAddress', network_config.get_ip_addr('IpAddress')),
gen.add_ip_addr_config('NetworkIpNetmask', network_config.get_ip_addr('IpNetmask')),
gen.add_ip_addr_config('NetworkIpGateway', network_config.get_ip_addr('IpGateway')),
]))
tcpconsole_sel = selection.Selection()
@tcpconsole_sel.option('NoTcpConsole')
def option(tcpconsole_config):
pass
@tcpconsole_sel.option('TcpConsole')
def option(tcpconsole_config):
console_port = tcpconsole_config.get_int('Port')
if not (1 <= console_port <= 65534):
tcpconsole_config.key_path('Port').error('Bad value.')
console_max_clients = tcpconsole_config.get_int('MaxClients')
if not (1 <= console_max_clients <= 32):
tcpconsole_config.key_path('MaxClients').error('Bad value.')
console_max_pcbs = tcpconsole_config.get_int('MaxPcbs')
if not (console_max_clients <= console_max_pcbs):
tcpconsole_config.key_path('MaxPcbs').error('Bad value.')
console_max_parts = tcpconsole_config.get_int('MaxParts')
if not (1 <= console_max_parts <= 64):
tcpconsole_config.key_path('MaxParts').error('Bad value.')
console_max_command_size = tcpconsole_config.get_int('MaxCommandSize')
if not (1 <= console_max_command_size <= 512):
tcpconsole_config.key_path('MaxCommandSize').error('Bad value.')
console_send_buf_size = tcpconsole_config.get_int('SendBufferSize')
if console_send_buf_size < network.min_send_buf:
tcpconsole_config.key_path('SendBufferSize').error('Bad value.')
console_recv_buf_size = tcpconsole_config.get_int('RecvBufferSize')
if console_recv_buf_size < network.min_recv_buf:
tcpconsole_config.key_path('RecvBufferSize').error('Bad value.')
gen.add_aprinter_include('printer/modules/TcpConsoleModule.h')
gen.add_aprinter_include('printer/utils/GcodeParser.h')
tcp_console_module = gen.add_module()
tcp_console_module.set_expr(TemplateExpr('TcpConsoleModuleService', [
TemplateExpr('SerialGcodeParserService', [
console_max_parts,
]),
console_port,
console_max_clients,
console_max_pcbs,
console_max_command_size,
console_send_buf_size,
console_recv_buf_size,
gen.add_float_constant('TcpConsoleSendBufTimeout', tcpconsole_config.get_float('SendBufTimeout')),
gen.add_float_constant('TcpConsoleSendEndTimeout', tcpconsole_config.get_float('SendEndTimeout')),
]))
network.add_resource_counts(connections=console_max_clients)
network_config.do_selection('tcpconsole', tcpconsole_sel)
webif_sel = selection.Selection()
@webif_sel.option('NoWebInterface')
def option(webif_config):
pass
@webif_sel.option('WebInterface')
def option(webif_config):
webif_port = webif_config.get_int('Port')
if not (1 <= webif_port <= 65534):
webif_config.key_path('Port').error('Bad value.')
webif_max_clients = webif_config.get_int('MaxClients')
if not (1 <= webif_max_clients <= 128):
webif_config.key_path('MaxClients').error('Bad value.')
webif_queue_size = webif_config.get_int('QueueSize')
if not (0 <= webif_queue_size <= 512):
webif_config.key_path('QueueSize').error('Bad value.')
webif_queue_recv_buffer_size = webif_config.get_int('QueueRecvBufferSize')
if not (0 < webif_queue_recv_buffer_size):
webif_config.key_path('QueueRecvBufferSize').error('Bad value.')
webif_max_pcbs = webif_config.get_int('MaxPcbs')
if not (webif_max_clients+webif_queue_size <= webif_max_pcbs):
webif_config.key_path('MaxPcbs').error('Bad value.')
webif_send_buf_size = webif_config.get_int('SendBufferSize')
if webif_send_buf_size < network.min_send_buf:
webif_config.key_path('SendBufferSize').error('Bad value.')
webif_recv_buf_size = webif_config.get_int('RecvBufferSize')
if webif_recv_buf_size < network.min_recv_buf:
webif_config.key_path('RecvBufferSize').error('Bad value (too small).')
if webif_recv_buf_size < webif_queue_recv_buffer_size:
webif_config.key_path('RecvBufferSize').error('Bad value (less than QueueRecvBufferSize).')
allow_persistent = webif_config.get_bool('AllowPersistent')
gen.add_float_constant('WebInterfaceQueueTimeout', webif_config.get_float('QueueTimeout'))
gen.add_float_constant('WebInterfaceInactivityTimeout', webif_config.get_float('InactivityTimeout'))
if webif_config.get_bool('EnableDebug'):
gen.add_define('APRINTER_DEBUG_HTTP_SERVER', 1)
gen.add_aprinter_include('printer/modules/WebInterfaceModule.h')
gen.add_aprinter_include('printer/utils/GcodeParser.h')
# Add modules with request handlers before the WebInterfaceModule so that
# deinitialization is clean: active requests in a module are deinited first,
# and only then the associated module itself.
if True:
gen.add_aprinter_include('printer/modules/WebApiFilesModule.h')
config_web_api_module = gen.add_module()
config_web_api_module.set_expr('WebApiFilesModuleService')
if config_manager_expr != 'ConstantConfigManagerService':
gen.add_aprinter_include('printer/modules/WebApiConfigModule.h')
config_web_api_module = gen.add_module()
config_web_api_module.set_expr('WebApiConfigModuleService')
webif_module = gen.add_module()
webif_module.set_expr(TemplateExpr('WebInterfaceModuleService', [
TemplateExpr('HttpServerNetParams', [
webif_port,
webif_max_clients,
webif_queue_size,
webif_queue_recv_buffer_size,
webif_max_pcbs,
webif_send_buf_size,
webif_recv_buf_size,
allow_persistent,
'WebInterfaceQueueTimeout',
'WebInterfaceInactivityTimeout',
]),
webif_config.get_int('JsonBufferSize'),
webif_config.get_int('NumGcodeSlots'),
TemplateExpr('SerialGcodeParserService', [
webif_config.get_int('MaxGcodeParts'),
]),
webif_config.get_int('MaxGcodeCommandSize'),
gen.add_float_constant('WebInterfaceGcodeSendBufTimeout', webif_config.get_float('GcodeSendBufTimeout')),
]))
network.add_resource_counts(connections=webif_max_clients+webif_queue_size)
network_config.do_selection('webinterface', webif_sel)
for development in board_data.enter_config('development'):
networktest_sel = selection.Selection()
@networktest_sel.option('Disabled')
def option(networksel_config):
pass
@networktest_sel.option('Enabled')
def option(networksel_config):
gen.add_aprinter_include('printer/modules/NetworkTestModule.h')
network_test_module = gen.add_module()
network_test_module.set_expr(TemplateExpr('NetworkTestModuleService', [
networksel_config.get_int('BufferSize'),
]))
network.add_resource_counts(connections=1)
development.do_selection('NetworkTestModule', networktest_sel)
current_config = board_data.get_config('current_config')
gen.add_aprinter_include('printer/PrinterMain.h')
for advanced in config.enter_config('advanced'):
gen.add_float_constant('LedBlinkInterval', advanced.get_float('LedBlinkInterval'))
gen.add_float_config('ForceTimeout', advanced.get_float('ForceTimeout'))
current_control_channel_list = []
microstep_axis_list = []
def stepper_cb(stepper, stepper_index):
name = stepper.get_id_char('Name')
homing_sel = selection.Selection()
@homing_sel.option('no_homing')
def option(homing):
return 'PrinterMainNoHomingParams'
@homing_sel.option('homing')
def option(homing):
gen.add_aprinter_include('printer/utils/AxisHomer.h')
return TemplateExpr('PrinterMainHomingParams', [
gen.add_bool_config('{}HomeDir'.format(name), homing.get_bool('HomeDir')),
gen.add_float_config('{}HomeOffset'.format(name), homing.get_float('HomeOffset')),
TemplateExpr('AxisHomerService', [
use_digital_input(gen, homing, 'HomeEndstopInput'),
gen.add_bool_config('{}HomeEndInvert'.format(name), homing.get_bool('HomeEndInvert')),
gen.add_float_config('{}HomeFastMaxDist'.format(name), homing.get_float('HomeFastMaxDist')),
gen.add_float_config('{}HomeRetractDist'.format(name), homing.get_float('HomeRetractDist')),
gen.add_float_config('{}HomeSlowMaxDist'.format(name), homing.get_float('HomeSlowMaxDist')),
gen.add_float_config('{}HomeFastSpeed'.format(name), homing.get_float('HomeFastSpeed')),
gen.add_float_config('{}HomeRetractSpeed'.format(name), homing.get_float('HomeRetractSpeed')),
gen.add_float_config('{}HomeSlowSpeed'.format(name), homing.get_float('HomeSlowSpeed')),
])
])
gen.add_aprinter_include('printer/actuators/AxisDriver.h')
stepper_ports_for_axis = []
def slave_steppers_cb(slave_stepper, slave_stepper_index):
slave_stepper_port = gen.get_object('stepper_port', slave_stepper, 'stepper_port')
stepper_ports_for_axis.append(slave_stepper_port)
stepper_config_prefix = name if slave_stepper_index == 0 else '{}S{}'.format(name, slave_stepper_index)
stepper_current_sel = selection.Selection()
@stepper_current_sel.option('NoCurrent')
def option(stepper_current_config):
pass
@stepper_current_sel.option('Current')
def option(stepper_current_config):
stepper_current_expr = TemplateExpr('MotorCurrentAxisParams', [
TemplateChar(name), # M906 will only work on all slave steppers of an axis...
gen.add_float_config('{}Current'.format(stepper_config_prefix), slave_stepper.get_float('Current')),
use_current_driver_channel(gen, stepper_current_config, 'DriverChannelParams', stepper_config_prefix),
])
current_control_channel_list.append(stepper_current_expr)
slave_stepper_port.do_selection('current', stepper_current_sel)
microstep_sel = selection.Selection()
@microstep_sel.option('NoMicroStep')
def option(microstep_config):
pass
@microstep_sel.option('MicroStep')
def option(microstep_config):
microstep_expr = TemplateExpr('MicroStepAxisParams', [
use_microstep(gen, microstep_config, 'MicroStepDriver'),
slave_stepper.get_int('MicroSteps'),
])
microstep_axis_list.append(microstep_expr)
slave_stepper_port.do_selection('microstep', microstep_sel)
return TemplateExpr('PrinterMainSlaveStepperParams', [
TemplateExpr('StepperDef', [
get_pin(gen, slave_stepper_port, 'DirPin'),
get_pin(gen, slave_stepper_port, 'StepPin'),
get_pin(gen, slave_stepper_port, 'EnablePin'),
slave_stepper_port.get_bool('StepLevel'),
slave_stepper_port.get_bool('EnableLevel'),
gen.add_bool_config('{}InvertDir'.format(stepper_config_prefix), slave_stepper.get_bool('InvertDir')),
]),
])
slave_steppers_expr = stepper.do_list('slave_steppers', slave_steppers_cb, min_count=1, max_count=10)
delay_sel = selection.Selection()
@delay_sel.option('NoDelay')
def option(delay_config):
return 'AxisDriverNoDelayParams'
@delay_sel.option('Delay')
def option(delay_config):
return TemplateExpr('AxisDriverDelayParams', [
gen.add_float_constant('{}DirSetTime'.format(name), delay_config.get_float('DirSetTime')),
gen.add_float_constant('{}StepHighTime'.format(name), delay_config.get_float('StepHighTime')),
gen.add_float_constant('{}StepLowTime'.format(name), delay_config.get_float('StepLowTime')),
])
first_stepper_port = stepper_ports_for_axis[0]
if first_stepper_port.get_config('StepperTimer').get_string('_compoundName') != 'interrupt_timer':
first_stepper_port.key_path('StepperTimer').error('Stepper port of the first stepper in an axis must have a timer unit defined.')
return TemplateExpr('PrinterMainAxisParams', [
TemplateChar(name),
gen.add_float_config('{}StepsPerUnit'.format(name), stepper.get_float('StepsPerUnit')),
gen.add_float_config('{}MinPos'.format(name), stepper.get_float('MinPos')),
gen.add_float_config('{}MaxPos'.format(name), stepper.get_float('MaxPos')),
gen.add_float_config('{}MaxSpeed'.format(name), stepper.get_float('MaxSpeed')),
gen.add_float_config('{}MaxAccel'.format(name), stepper.get_float('MaxAccel')),
gen.add_float_config('{}DistanceFactor'.format(name), stepper.get_float('DistanceFactor')),
gen.add_float_config('{}CorneringDistance'.format(name), stepper.get_float('CorneringDistance')),
stepper.do_selection('homing', homing_sel),
stepper.get_bool('EnableCartesianSpeedLimit'),
stepper.get_bool('IsExtruder'),
32,
TemplateExpr('AxisDriverService', [
use_interrupt_timer(gen, first_stepper_port, 'StepperTimer', user='MyPrinter::GetAxisTimer<{}>'.format(stepper_index)),
'TheAxisDriverPrecisionParams',
stepper.get_bool('PreloadCommands'),
stepper.do_selection('delay', delay_sel),
]),
slave_steppers_expr,
])
steppers_expr = config.do_list('steppers', stepper_cb, min_count=1, max_count=15)
heater_types = ['Extruder', 'Bed', 'Chamber']
heater_numbers = dict((heater_type, set()) for heater_type in heater_types)
def heater_cb(heater, heater_index):
heater_type_sel = selection.Selection()
@heater_type_sel.options(heater_types)
def handle_heater_type(heater_type, heater_type_config):
heater_number = get_heater_fan_number(heater_type_config, 'Number', heater_numbers[heater_type])
return heater_type, heater_number
heater_type, heater_number = heater.do_selection('Type', heater_type_sel)
heater_cfg_prefix = '{}Heater{}'.format(heater_type, heater_number)
conversion_sel = selection.Selection()
@conversion_sel.option('conversion')
def option(conversion_config):
gen.add_aprinter_include('printer/thermistor/GenericThermistor.h')
return TemplateExpr('GenericThermistorService', [
gen.add_float_config('{}TempResistorR'.format(heater_cfg_prefix), conversion_config.get_float('ResistorR')),
gen.add_float_config('{}TempR0'.format(heater_cfg_prefix), conversion_config.get_float('R0')),
gen.add_float_config('{}TempBeta'.format(heater_cfg_prefix), conversion_config.get_float('Beta')),
gen.add_float_config('{}TempMinTemp'.format(heater_cfg_prefix), conversion_config.get_float('MinTemp')),
gen.add_float_config('{}TempMaxTemp'.format(heater_cfg_prefix), conversion_config.get_float('MaxTemp')),
])
@conversion_sel.option('PtRtdFormula')
def option(conversion_config):
gen.add_aprinter_include('printer/thermistor/PtRtdFormula.h')
return TemplateExpr('PtRtdFormulaService', [
gen.add_float_config('{}TempResistorR'.format(heater_cfg_prefix), conversion_config.get_float('ResistorR')),
gen.add_float_config('{}TempPtR0'.format(heater_cfg_prefix), conversion_config.get_float('PtR0')),
gen.add_float_config('{}TempPtA'.format(heater_cfg_prefix), conversion_config.get_float('PtA')),
gen.add_float_config('{}TempPtB'.format(heater_cfg_prefix), conversion_config.get_float('PtB')),
gen.add_float_config('{}TempMinTemp'.format(heater_cfg_prefix), conversion_config.get_float('MinTemp')),
gen.add_float_config('{}TempMaxTemp'.format(heater_cfg_prefix), conversion_config.get_float('MaxTemp')),
])
@conversion_sel.option('Max31855Formula')
def option(conversion_config):
gen.add_aprinter_include('printer/thermistor/Max31855Formula.h')
return 'Max31855FormulaService'
@conversion_sel.option('E3dPt100')
def option(conversion_config):
gen.add_aprinter_include('printer/thermistor/InterpolationTableThermistor_tables.h')
return TemplateExpr('InterpolationTableThermistorService', ['InterpolationTableE3dPt100'])
conversion = heater.do_selection('conversion', conversion_sel)
for control in heater.enter_config('control'):
gen.add_aprinter_include('printer/temp_control/PidControl.h')
control_interval = control.get_float('ControlInterval')
control_service = TemplateExpr('PidControlService', [
gen.add_float_config('{}PidP'.format(heater_cfg_prefix), control.get_float('PidP')),
gen.add_float_config('{}PidI'.format(heater_cfg_prefix), control.get_float('PidI')),
gen.add_float_config('{}PidD'.format(heater_cfg_prefix), control.get_float('PidD')),
gen.add_float_config('{}PidIStateMin'.format(heater_cfg_prefix), control.get_float('PidIStateMin')),
gen.add_float_config('{}PidIStateMax'.format(heater_cfg_prefix), control.get_float('PidIStateMax')),
gen.add_float_config('{}PidDHistory'.format(heater_cfg_prefix), control.get_float('PidDHistory')),
])
for observer in heater.enter_config('observer'):
gen.add_aprinter_include('printer/utils/TemperatureObserver.h')
observer_service = TemplateExpr('TemperatureObserverService', [
gen.add_float_config('{}ObserverInterval'.format(heater_cfg_prefix), observer.get_float('ObserverInterval')),
gen.add_float_config('{}ObserverTolerance'.format(heater_cfg_prefix), observer.get_float('ObserverTolerance')),
gen.add_float_config('{}ObserverMinTime'.format(heater_cfg_prefix), observer.get_float('ObserverMinTime')),
])
cold_extrusion_sel = selection.Selection()
@cold_extrusion_sel.option('NoColdExtrusionPrevention')
def option(cold_extrusion_config):
return 'AuxControlNoColdExtrusionParams'
@cold_extrusion_sel.option('ColdExtrusionPrevention')
def option(cold_extrusion_config):
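# Cold extrusion prevention: a minimum extrusion temperature guarded by this
# heater, applied to the listed extruder axes (passed as WrapInt character
# parameters).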
extruders_exprs = []
for axis_name in cold_extrusion_config.get_list(config_reader.ConfigTypeString(), 'ExtruderAxes', max_count=20):
extruders_exprs.append(TemplateExpr('WrapInt', [TemplateChar(axis_name)]))
return TemplateExpr('AuxControlColdExtrusionParams', [
gen.add_float_config('{}MinExtrusionTemp'.format(heater_cfg_prefix), cold_extrusion_config.get_float('MinExtrusionTemp')),
TemplateList(extruders_exprs),
])
cold_extrusion = heater.do_selection('cold_extrusion_prevention', cold_extrusion_sel)
return TemplateExpr('AuxControlModuleHeaterParams', [
'HeaterType::{}'.format(heater_type),
heater_number,
use_analog_input(gen, heater, 'ThermistorInput', '{}::GetHeaterAnalogInput<{}>'.format(aux_control_module_user, heater_index)),
conversion,
gen.add_float_config('{}MinSafeTemp'.format(heater_cfg_prefix), heater.get_float('MinSafeTemp')),
gen.add_float_config('{}MaxSafeTemp'.format(heater_cfg_prefix), heater.get_float('MaxSafeTemp')),
gen.add_float_config('{}ControlInterval'.format(heater_cfg_prefix), control_interval),
control_service,
observer_service,
use_pwm_output(gen, heater, 'pwm_output', '{}::GetHeaterPwm<{}>'.format(aux_control_module_user, heater_index), heater_cfg_prefix),
cold_extrusion,
])
heaters_expr = config.do_list('heaters', heater_cb, max_count=15)
transform_sel = selection.Selection()
transform_axes = []
@transform_sel.option('NoTransform')
def option(transform):
return 'PrinterMainNoTransformParams'
@transform_sel.default()
def default(transform_type, transform):
virt_homing_axes = []
def virtual_axis_cb(virtual_axis, virtual_axis_index):
name = virtual_axis.get_id_char('Name')
transform_axes.append(name)
homing_sel = selection.Selection()
@homing_sel.option('no_homing')
def option(homing):
pass
@homing_sel.option('homing')
def option(homing):
virt_homing_axes.append(TemplateExpr('VirtualHomingModuleAxisParams', [
TemplateChar(name),
gen.add_bool_config('{}HomeByDefault'.format(name), homing.get_bool('ByDefault')),
use_digital_input(gen, homing, 'HomeEndstopInput'),
gen.add_bool_config('{}HomeEndInvert'.format(name), homing.get_bool('HomeEndInvert')),
gen.add_bool_config('{}HomeDir'.format(name), homing.get_bool('HomeDir')),
gen.add_float_config('{}HomeFastExtraDist'.format(name), homing.get_float('HomeFastExtraDist')),
gen.add_float_config('{}HomeRetractDist'.format(name), homing.get_float('HomeRetractDist')),
gen.add_float_config('{}HomeSlowExtraDist'.format(name), homing.get_float('HomeSlowExtraDist')),
gen.add_float_config('{}HomeFastSpeed'.format(name), homing.get_float('HomeFastSpeed')),
gen.add_float_config('{}HomeRetractSpeed'.format(name), homing.get_float('HomeRetractSpeed')),
gen.add_float_config('{}HomeSlowSpeed'.format(name), homing.get_float('HomeSlowSpeed')),
]))
virtual_axis.do_selection('homing', homing_sel)
return TemplateExpr('PrinterMainVirtualAxisParams', [
TemplateChar(name),
gen.add_float_config('{}MinPos'.format(name), virtual_axis.get_float('MinPos')),
gen.add_float_config('{}MaxPos'.format(name), virtual_axis.get_float('MaxPos')),
gen.add_float_config('{}MaxSpeed'.format(name), virtual_axis.get_float('MaxSpeed')),
])
def transform_stepper_cb(transform_stepper, transform_stepper_index):
stepper_name = transform_stepper.get_id_char('StepperName')
try:
stepper_generator = config.enter_elem_by_id('steppers', 'Name', stepper_name)
except config_reader.ConfigError:
transform_stepper.path().error('Unknown stepper \'{}\' referenced.'.format(stepper_name))
for stepper in stepper_generator:
if stepper.get_bool('EnableCartesianSpeedLimit'):
stepper.key_path('EnableCartesianSpeedLimit').error('A stepper involved in a coordinate transform may not be cartesian.')
return TemplateExpr('WrapInt', [TemplateChar(stepper_name)])
transform_type_sel = selection.Selection()
@transform_type_sel.option('Null')
def option():
gen.add_aprinter_include('printer/transform/IdentityTransform.h')
return TemplateExpr('IdentityTransformService', [0]), 'Transform'
@transform_type_sel.option('CoreXY')
def option():
gen.add_aprinter_include('printer/transform/CoreXyTransform.h')
return 'CoreXyTransformService', 'Transform'
@transform_type_sel.option('Delta')
def option():
gen.add_aprinter_include('printer/transform/DeltaTransform.h')
return TemplateExpr('DeltaTransformService', [
gen.add_float_config('DeltaDiagonalRod', transform.get_float('DiagnalRod')),
gen.add_float_config('DeltaDiagonalRodCorr1', transform.get_float('DiagonalRodCorr1')),
gen.add_float_config('DeltaDiagonalRodCorr2', transform.get_float('DiagonalRodCorr2')),
gen.add_float_config('DeltaDiagonalRodCorr3', transform.get_float('DiagonalRodCorr3')),
gen.add_float_config('DeltaSmoothRodOffset', transform.get_float('SmoothRodOffset')),
gen.add_float_config('DeltaEffectorOffset', transform.get_float('EffectorOffset')),
gen.add_float_config('DeltaCarriageOffset', transform.get_float('CarriageOffset')),
gen.add_float_config('DeltaLimitRadius', transform.get_float('LimitRadius')),
]), 'Delta'
@transform_type_sel.option('RotationalDelta')
def option():
gen.add_aprinter_include('printer/transform/RotationalDeltaTransform.h')
return TemplateExpr('RotationalDeltaTransformService', [
gen.add_float_config('DeltaEndEffectorLength', transform.get_float('EndEffectorLength')),
gen.add_float_config('DeltaBaseLength', transform.get_float('BaseLength')),
gen.add_float_config('DeltaRodLength', transform.get_float('RodLength')),
gen.add_float_config('DeltaArmLength', transform.get_float('ArmLength')),
gen.add_float_config('DeltaZOffset', transform.get_float('ZOffset')),
]), 'Delta'
@transform_type_sel.option('SCARA')
def option():
gen.add_aprinter_include('printer/transform/SCARATransform.h')
return TemplateExpr('SCARATransformService', [
gen.add_float_config('SCARAArm1Length', transform.get_float('Arm1Length')),
gen.add_float_config('SCARAArm2Length', transform.get_float('Arm2Length')),
gen.add_bool_config('SCARAExternalArm2Motor', transform.get_bool('ExternalArm2Motor')),
gen.add_float_config('SCARAXOffset', transform.get_float('XOffset')),
gen.add_float_config('SCARAYOffset', transform.get_float('YOffset')),
]), 'SCARA'
@transform_type_sel.option('DualSCARA')
def option():
gen.add_aprinter_include('printer/transform/DualSCARATransform.h')
return TemplateExpr('DualSCARATransformService', [
gen.add_float_config('SCARAArm1ShoulderXCoord', transform.get_float('Arm1ShoulderXCoord')),
gen.add_float_config('SCARAArm2ShoulderXCoord', transform.get_float('Arm2ShoulderXCoord')),
gen.add_float_config('SCARAArm1ProximalSideLength', transform.get_float('Arm1ProximalSideLength')),
gen.add_float_config('SCARAArm2ProximalSideLength', transform.get_float('Arm2ProximalSideLength')),
gen.add_float_config('SCARAArm1DistalSideLength', transform.get_float('Arm1DistalSideLength')),
gen.add_float_config('SCARAArm2DistalSideLength', transform.get_float('Arm2DistalSideLength')),
gen.add_float_config('SCARAXOffset', transform.get_float('XOffset')),
gen.add_float_config('SCARAYOffset', transform.get_float('YOffset')),
]), 'SCARA'
transform_type_expr, transform_prefix = transform_type_sel.run(transform_type)
splitter_sel = selection.Selection()
@splitter_sel.option('NoSplitter')
def option(splitter):
gen.add_aprinter_include('printer/transform/NoSplitter.h')
return 'NoSplitterService'
@splitter_sel.option('DistanceSplitter')
def option(splitter):
gen.add_aprinter_include('printer/transform/DistanceSplitter.h')
return TemplateExpr('DistanceSplitterService', [
gen.add_float_config('{}MinSplitLength'.format(transform_prefix), splitter.get_float('MinSplitLength')),
gen.add_float_config('{}MaxSplitLength'.format(transform_prefix), splitter.get_float('MaxSplitLength')),
gen.add_float_config('{}SegmentsPerSecond'.format(transform_prefix), splitter.get_float('SegmentsPerSecond')),
])
splitter_expr = transform.do_selection('Splitter', splitter_sel)
max_dimensions = 10
dimension_count = transform.get_int('DimensionCount')
if not 0 <= dimension_count <= max_dimensions:
transform.path().error('Incorrect DimensionCount.')
virtual_axes = transform.do_keyed_list(dimension_count, 'CartesianAxes', 'VirtualAxis', virtual_axis_cb)
transform_steppers = transform.do_keyed_list(dimension_count, 'Steppers', 'TransformStepper', transform_stepper_cb)
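# IdentityAxes are virtual axes mapped one-to-one onto steppers. They are
# appended to the virtual axis and stepper lists, and an IdentityTransform
# covering them is either used alone (when there are no other dimensions) or
# merged with the selected transform through CombineTransform.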
if transform.has('IdentityAxes'):
num_idaxes = 0
for idaxis in transform.iter_list_config('IdentityAxes', max_count=max_dimensions-dimension_count):
virt_axis_name = idaxis.get_id_char('Name')
stepper_name = idaxis.get_id_char('StepperName')
limits_sel = selection.Selection()
@limits_sel.option('LimitsAsStepper')
def option(limits_config):
return [
'{}MinPos'.format(stepper_name),
'{}MaxPos'.format(stepper_name),
]
@limits_sel.option('LimitsSpecified')
def option(limits_config):
return [
gen.add_float_config('{}MinPos'.format(virt_axis_name), limits_config.get_float('MinPos')),
gen.add_float_config('{}MaxPos'.format(virt_axis_name), limits_config.get_float('MaxPos')),
]
limits = idaxis.do_selection('Limits', limits_sel)
transform_axes.append(virt_axis_name)
num_idaxes += 1
virtual_axes.append_arg(TemplateExpr('PrinterMainVirtualAxisParams', [
TemplateChar(virt_axis_name),
limits[0],
limits[1],
gen.add_float_config('{}MaxSpeed'.format(virt_axis_name), float('inf'), is_constant=True),
]))
transform_steppers.append_arg(TemplateExpr('WrapInt', [TemplateChar(stepper_name)]))
if num_idaxes > 0:
gen.add_aprinter_include('printer/transform/IdentityTransform.h')
id_transform_type_expr = TemplateExpr('IdentityTransformService', [num_idaxes])
if dimension_count == 0:
transform_type_expr = id_transform_type_expr
else:
gen.add_aprinter_include('printer/transform/CombineTransform.h')
transform_type_expr = TemplateExpr('CombineTransformService', [TemplateList([transform_type_expr, id_transform_type_expr])])
dimension_count += num_idaxes
if dimension_count == 0:
transform.path().error('Need at least one dimension.')
if len(virt_homing_axes) > 0:
gen.add_aprinter_include('printer/modules/VirtualHomingModule.h')
virt_homing_module = gen.add_module()
virt_homing_module.set_expr(TemplateExpr('VirtualHomingModuleService', [
TemplateList(virt_homing_axes),
]))
return TemplateExpr('PrinterMainTransformParams', [
virtual_axes,
transform_steppers,
transform_type_expr,
splitter_expr,
])
transform_expr = config.do_selection('transform', transform_sel)
probe_sel = selection.Selection()
@probe_sel.option('NoProbe')
def option(probe):
return False
@probe_sel.option('Probe')
def option(probe):
gen.add_aprinter_include('printer/modules/BedProbeModule.h')
probe_module = gen.add_module()
gen.add_bool_config('ProbeInvert', probe.get_bool('InvertInput'))
gen.add_float_config('ProbeOffsetX', probe.get_float('OffsetX'))
gen.add_float_config('ProbeOffsetY', probe.get_float('OffsetY'))
gen.add_float_config('ProbeStartHeight', probe.get_float('StartHeight'))
gen.add_float_config('ProbeLowHeight', probe.get_float('LowHeight'))
gen.add_float_config('ProbeRetractDist', probe.get_float('RetractDist'))
gen.add_float_config('ProbeMoveSpeed', probe.get_float('MoveSpeed'))
gen.add_float_config('ProbeFastSpeed', probe.get_float('FastSpeed'))
gen.add_float_config('ProbeRetractSpeed', probe.get_float('RetractSpeed'))
gen.add_float_config('ProbeSlowSpeed', probe.get_float('SlowSpeed'))
gen.add_float_config('ProbeGeneralZOffset', probe.get_float('GeneralZOffset'))
num_points = 0
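# Each probe point contributes Enabled/X/Y/ZOffset configuration values;
# num_points is used below to emit one BedProbePointParams entry per point.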
for (i, point) in enumerate(probe.iter_list_config('ProbePoints', min_count=1, max_count=20)):
num_points += 1
gen.add_bool_config('ProbeP{}Enabled'.format(i+1), point.get_bool('Enabled'))
gen.add_float_config('ProbeP{}X'.format(i+1), point.get_float('X'))
gen.add_float_config('ProbeP{}Y'.format(i+1), point.get_float('Y'))
gen.add_float_config('ProbeP{}ZOffset'.format(i+1), point.get_float('Z-offset'))
correction_sel = selection.Selection()
@correction_sel.option('NoCorrection')
def option(correction):
return 'BedProbeNoCorrectionParams'
@correction_sel.option('Correction')
def option(correction):
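# Probe-based bed correction requires Z to be part of the coordinate
# transform; the quadratic correction term is only made configurable when
# QuadraticCorrectionSupported is enabled.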
if 'Z' not in transform_axes:
correction.path().error('Bed correction is only supported when the Z axis is involved in the coordinate transformation.')
quadratic_supported = correction.get_bool('QuadraticCorrectionSupported')
quadratic_enabled = gen.add_bool_config('ProbeQuadrCorrEnabled', correction.get_bool('QuadraticCorrectionEnabled')) if quadratic_supported else 'void'
return TemplateExpr('BedProbeCorrectionParams', [quadratic_supported, quadratic_enabled])
correction_expr = probe.do_selection('correction', correction_sel)
probe_module.set_expr(TemplateExpr('BedProbeModuleService', [
'MakeTypeList<WrapInt<\'X\'>, WrapInt<\'Y\'>>',
'\'Z\'',
use_digital_input(gen, probe, 'ProbePin'),
'ProbeInvert',
'MakeTypeList<ProbeOffsetX, ProbeOffsetY>',
'ProbeStartHeight',
'ProbeLowHeight',
'ProbeRetractDist',
'ProbeMoveSpeed',
'ProbeFastSpeed',
'ProbeRetractSpeed',
'ProbeSlowSpeed',
'ProbeGeneralZOffset',
TemplateList(['BedProbePointParams<ProbeP{0}Enabled, MakeTypeList<ProbeP{0}X, ProbeP{0}Y>, ProbeP{0}ZOffset>'.format(i+1) for i in range(num_points)]),
correction_expr,
]))
return True
have_bed_probing = config.get_config('probe_config').do_selection('probe', probe_sel)
fan_numbers = set()
def fan_cb(fan, fan_index):
fan_number = get_heater_fan_number(fan, 'Number', fan_numbers)
return TemplateExpr('AuxControlModuleFanParams', [
fan_number,
use_pwm_output(gen, fan, 'pwm_output', '{}::GetFanPwm<{}>'.format(aux_control_module_user, fan_index), 'Fan{}'.format(fan_number))
])
fans_expr = config.do_list('fans', fan_cb, max_count=15)
def laser_cb(laser, laser_index):
gen.add_aprinter_include('printer/actuators/LaserDriver.h')
gen.add_aprinter_include('printer/duty_formula/LinearDutyFormula.h')
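# Each laser uses a hard PWM output on its laser port and a LaserDriver
# clocked by that port's interrupt timer, with LinearDutyFormula as the
# power-to-duty-cycle formula.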
name = laser.get_id_char('Name')
laser_port = gen.get_object('laser_port', laser, 'laser_port')
return TemplateExpr('PrinterMainLaserParams', [
TemplateChar(name),
TemplateChar(laser.get_id_char('DensityName')),
gen.add_float_config('{}LaserPower'.format(name), laser.get_float('LaserPower')),
gen.add_float_config('{}MaxPower'.format(name), laser.get_float('MaxPower')),
use_pwm_output(gen, laser_port, 'pwm_output', '', '', hard=True),
TemplateExpr('LinearDutyFormulaService', [15]),
TemplateExpr('LaserDriverService', [
use_interrupt_timer(gen, laser_port, 'LaserTimer', user='MyPrinter::GetLaserDriver<{}>::TheTimer'.format(laser_index)),
gen.add_float_constant('{}AdjustmentInterval'.format(name), laser.get_float('AdjustmentInterval')),
'LaserDriverDefaultPrecisionParams',
]),
])
lasers_expr = config.do_list('lasers', laser_cb, max_count=15)
current_sel = selection.Selection()
@current_sel.option('NoCurrent')
def option(current):
pass
@current_sel.option('Current')
def option(current):
gen.add_aprinter_include('printer/modules/MotorCurrentModule.h')
current_module = gen.add_module()
current_module.set_expr(TemplateExpr('MotorCurrentModuleService', [
TemplateList(current_control_channel_list),
use_current_driver(gen, current, 'current_driver', 'MyPrinter::GetModule<{}>::GetDriver'.format(current_module.index))
]))
current_config.do_selection('current', current_sel)
if len(microstep_axis_list) > 0:
gen.add_aprinter_include('printer/modules/MicroStepConfigModule.h')
microstep_module = gen.add_module()
microstep_module.set_expr(TemplateExpr('MicroStepConfigModuleService', [
TemplateList(microstep_axis_list),
]))
gen.add_aprinter_include('printer/modules/AuxControlModule.h')
aux_control_module.set_expr(TemplateExpr('AuxControlModuleService', [
performance.get_int_constant('EventChannelBufferSize'),
event_channel_timer_expr,
gen.add_float_config('WaitTimeout', config.get_float('WaitTimeout')),
gen.add_float_config('WaitReportPeriod', config.get_float('WaitReportPeriod')),
heaters_expr,
fans_expr,
]))
moves_sel = selection.Selection()
@moves_sel.option('NoMoves')
def option(moves_config):
pass
@moves_sel.option('Moves')
def option(moves_config):
gen.add_aprinter_include('printer/modules/MoveToModule.h')
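# MoveToModule performs the configured moves from hooks (after homing or
# after bed probing); each move has a HookPriority, an Enabled flag, a Speed
# and a set of per-axis coordinates.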
def move_cb(move_config, move_index):
move_config_prefix = 'Move{}'.format(move_index+1)
move_coords = set()
def coord_cb(coord_config, coord_index):
axis_name = coord_config.get_id_char('AxisName')
if axis_name in move_coords:
coord_config.key_path('AxisName').error('Duplicate axis in coordinate.')
move_coords.add(axis_name)
return TemplateExpr('MoveCoordSpec', [
TemplateChar(axis_name),
gen.add_float_config('{}{}'.format(move_config_prefix, axis_name), coord_config.get_float('Value')),
])
hook_type = move_config.do_enum('HookType', {
'After homing': 'ServiceList::AfterDefaultHomingHookService',
'After bed probing': 'ServiceList::AfterBedProbingHookService',
})
if hook_type == 'ServiceList::AfterBedProbingHookService' and not have_bed_probing:
move_config.key_path('HookType').error('Cannot use bed probing hook without bed probing configured.')
return TemplateExpr('MoveSpec', [
hook_type,
move_config.get_int('HookPriority'),
gen.add_bool_config('{}Enabled'.format(move_config_prefix), move_config.get_bool('Enabled')),
gen.add_float_config('{}Speed'.format(move_config_prefix), move_config.get_float('Speed')),
move_config.do_list('Coordinates', coord_cb, max_count=10),
])
moveto_module = gen.add_module()
moveto_module.set_expr(TemplateExpr('MoveToModuleService', [
moves_config.do_list('Moves', move_cb, max_count=10),
]))
config.do_selection('Moves', moves_sel)
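# If something requested a millisecond clock and the platform has no hardware
# one, fall back to the software MillisecondClock global resource; the
# MillisecondClockInfoModule is also registered.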
if gen._need_millisecond_clock:
if not gen._have_hw_millisecond_clock:
gen.add_aprinter_include('system/MillisecondClock.h')
gen.add_global_resource(5, 'MyMillisecondClock', TemplateExpr('MillisecondClock', ['Context', 'Program']), context_name='MillisecondClock')
gen.add_aprinter_include('printer/modules/MillisecondClockInfoModule.h')
millisecond_clock_module = gen.add_module()
millisecond_clock_module.set_expr('MillisecondClockInfoModuleService')
printer_params = TemplateExpr('PrinterMainParams', [
led_pin_expr,
'LedBlinkInterval',
gen.add_float_config('InactiveTime', config.get_float('InactiveTime')),
performance.get_int_constant('ExpectedResponseLength'),
performance.get_int_constant('ExtraSendBufClearance'),
performance.get_int_constant('MaxMsgSize'),
gen.add_float_constant('SpeedLimitMultiply', 1.0 / 60.0),
gen.add_float_config('MaxStepsPerCycle', performance.get_float('MaxStepsPerCycle')),
performance.get_int_constant('StepperSegmentBufferSize'),
performance.get_int_constant('LookaheadBufferSize'),
performance.get_int_constant('LookaheadCommitCount'),
'ForceTimeout',
performance.get_identifier('FpType', lambda x: x in ('float', 'double')),
setup_watchdog(gen, platform, 'watchdog', 'MyPrinter::GetWatchdog'),
watchdog_debug_mode,
config_manager_expr,
'ConfigList',
steppers_expr,
transform_expr,
lasers_expr,
TemplateList(gen._modules_exprs),
])
printer_params_typedef = 'struct ThePrinterParams : public {} {{}};'.format(printer_params.build(0))
printer_expr = TemplateExpr('PrinterMainArg', ['Context', 'Program', 'ThePrinterParams'])
gen.add_global_resource(30, 'MyPrinter', printer_expr, use_instance=True, context_name='Printer', code_before=printer_params_typedef, is_fast_event_root=True)
gen.add_subst('EmergencyProvider', 'MyPrinter')
setup_event_loop(gen)
gen.finalize()
outputConfig = {
'desiredOutputs': output_types,
'optimizeForSize': optimize_for_size,
'toolchainOptSize': optimize_libc_for_size,
'buildWithClang': build_with_clang,
'verboseBuild': verbose_build,
'enableDebugSymbols': debug_symbols,
'defines': gen._defines,
'includeDirs': gen._include_dirs,
'extraSourceFiles': gen._extra_sources,
'mainSourceCode': rich_template.RichTemplate(main_template).substitute(gen.get_subst()),
}
outputConfig.update(platformInfo)
return outputConfig
def main():
# Parse arguments.
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--config', required=True,
help='JSON configuration file to use.')
parser.add_argument('--cfg-name',
help='Build this configuration instead of the one specified in the configuration file.')
args = parser.parse_args()
# Determine directories.
generator_src_dir = file_utils.file_dir(__file__)
aprinter_src_dir = os.path.join(generator_src_dir, '..', '..')
# Read the configuration.
with file_utils.use_input_file(args.config) as config_f:
config_data = json.load(config_f)
# Determine the configuration name (turn empty string to None).
cfg_name = args.cfg_name if args.cfg_name != "" else None
# Read main template file.
main_template = file_utils.read_file(os.path.join(generator_src_dir, 'main_template.cpp'))
# Call the generate function.
genCfg = generate(config_data, cfg_name, main_template)
# Print the generated configuration to stdout.
print(json.dumps(genCfg))
if __name__ == '__main__':
main()
|
tests/terraform/checks/resource/linode/test_user_email_set.py
|
jamesholland-uk/checkov
| 4,013 |
137454
|
import unittest
import hcl2
from checkov.terraform.checks.resource.linode.user_email_set import check
from checkov.common.models.enums import CheckResult
class Testuser_email_set(unittest.TestCase):
def test_success(self):
hcl_res = hcl2.loads("""
resource "linode_user" "test" {
email="<EMAIL>"
}
""")
resource_conf = hcl_res['resource'][0]['linode_user']['test']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
def test_failure(self):
hcl_res = hcl2.loads("""
resource "linode_user" "test" {
}
""")
resource_conf = hcl_res['resource'][0]['linode_user']['test']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.FAILED, scan_result)
if __name__ == '__main__':
unittest.main()
|
pyprob/__init__.py
|
SRagy/pyprob
| 125 |
137458
|
<reponame>SRagy/pyprob
__version__ = '1.3.0.dev7'
from .util import TraceMode, PriorInflation, InferenceEngine, InferenceNetwork, Optimizer, LearningRateScheduler, ObserveEmbedding, set_verbosity, set_device, seed
from .state import sample, observe, factor, tag
from .address_dictionary import AddressDictionary
from .model import Model, RemoteModel
|
py/testdir_multi_jvm/notest_import_covtype_parse_4jvm_fvec.py
|
gigliovale/h2o
| 882 |
137464
|
<reponame>gigliovale/h2o
import unittest, sys, random, time
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
pass
print "Will build clouds with incrementing heap sizes and import folder/parse"
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_import_covtype_parse_4jvm_fvec(self):
csvFilename = "covtype.data"
importFolderPath = "standard"
trialMax = 2
for tryHeap in [1]:
print "\n", tryHeap,"GB heap, 4 jvms, import folder, then loop parsing 'covtype.data' to unique keys"
h2o.init(node_count=4,java_heap_GB=tryHeap)
for trial in range(trialMax):
# import each time, because h2o deletes source after parse
csvPathname = importFolderPath + "/" + csvFilename
hex_key = csvFilename + "_" + str(trial) + ".hex"
parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=csvPathname, hex_key=hex_key, timeoutSecs=20)
# sticky ports?
h2o.tear_down_cloud()
time.sleep(5)
# print "Waiting 60 secs for TIME_WAIT sockets to go away"
# time.sleep(60)
time.sleep(2)
if __name__ == '__main__':
h2o.unit_main()
|
OpenAttack/victim/classifiers/__init__.py
|
e-tornike/OpenAttack
| 444 |
137507
|
from .base import Classifier
from .transformers import TransformersClassifier
|
cocos/numerics/_algorithm.py
|
michaelnowotny/cocos
| 101 |
137519
|
import arrayfire as af
import typing as tp
from ._array import ndarray, _wrap_af_array
def count_nonzero(a: ndarray,
axis: tp.Optional[int] = None) \
-> tp.Union[int, ndarray]:
return _wrap_af_array(af.count(a._af_array, dim=axis))
def diff(a: ndarray,
n: int = 1,
axis: int = -1) -> ndarray:
"""Calculate the n-th discrete difference along given axis."""
if axis == -1:
# use last axis
axis = a.ndim - 1
if 0 <= axis <= 3:
if axis >= a.ndim:
raise ValueError("axis exceeds array dimension")
if n >= a.shape[axis]:
raise ValueError(f"input array has length {a.shape[axis]} in "
f"dimension {axis} and therefore cannot be "
f"differentiated more than {a.shape[axis] - 1} "
f"times")
if n == 0:
return a.copy()
elif n == 1:
new_array = af.diff1(a._af_array, dim=axis)
elif n == 2:
new_array = af.diff2(a._af_array, dim=axis)
elif n > 2:
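# Higher-order differences are built by repeatedly applying the second-order
# diff2 and, if n is odd, finishing with one diff1 pass.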
output = a
while n >= 2:
n -= 2
output = ndarray(af.diff2(output._af_array, dim=axis))
if n == 1:
output = ndarray(af.diff1(output._af_array, dim=axis))
return output
else:
raise ValueError(f"n must be positive but is {n}")
else:
raise ValueError("Axis must be between 0 and 3")
return ndarray(new_array)
def flatnonzero(a: ndarray) -> ndarray:
return ndarray(af.where(a._af_array))
def sort_by_keys(keys: ndarray,
values: ndarray,
axis: int = -1,
ascending: bool = True) -> tp.Tuple[ndarray, ndarray]:
if keys.shape != values.shape:
raise ValueError("Keys and values must have the same dimensions.")
elif axis is None:
keys = keys.flatten()
values = values.flatten()
elif axis == -1:
axis = keys.ndim - 1
elif axis >= keys.ndim:
raise ValueError(f"Parameter axis must be between -1 and "
f"{keys.ndim - 1}")
ordered_values, ordered_keys \
= af.sort_by_key(values._af_array,
keys._af_array,
is_ascending=ascending)
return ndarray(ordered_keys), ndarray(ordered_values)
def unique(ar: ndarray,
return_index: bool = False,
return_inverse: bool = False,
return_counts: bool = False) -> ndarray:
if return_index:
raise ValueError("return_index=True is not supported")
if return_inverse:
raise ValueError("return_inverse=True is not supported")
if return_counts:
raise ValueError("return_counts=True is not supported")
unsorted_unique_set_af_array = af.set_unique(ar._af_array,
is_sorted=False)
sorted_unique_set_af_array = af.sort(unsorted_unique_set_af_array,
dim=0,
is_ascending=True)
return ndarray(sorted_unique_set_af_array)
def union1d(ar1: ndarray, ar2: ndarray) -> ndarray:
new_af_array = af.set_union(ar1._af_array,
ar2._af_array,
is_unique=False)
return ndarray(new_af_array)
def intersect1d(ar1: ndarray, ar2: ndarray) -> ndarray:
new_af_array = af.set_intersect(ar1._af_array,
ar2._af_array,
is_unique=False)
return ndarray(new_af_array)
|
test/datasets/test_dukemtmc.py
|
Tianyu97/Person-Re-identification-Research-Based-on-Deep-Learning
| 1,341 |
137520
|
from unittest import TestCase
class TestDukeMTMC(TestCase):
def test_all(self):
import os.path as osp
from reid.datasets import DukeMTMC
from reid.utils.serialization import read_json
root, split_id, num_val = '/tmp/open-reid/dukemtmc', 0, 100
dataset = DukeMTMC(root, split_id=split_id, num_val=num_val,
download=True)
self.assertTrue(osp.isfile(osp.join(root, 'meta.json')))
self.assertTrue(osp.isfile(osp.join(root, 'splits.json')))
meta = read_json(osp.join(root, 'meta.json'))
self.assertEquals(len(meta['identities']), 1812)
splits = read_json(osp.join(root, 'splits.json'))
self.assertEquals(len(splits), 1)
self.assertDictEqual(meta, dataset.meta)
self.assertDictEqual(splits[split_id], dataset.split)
|
propeller/__init__.py
|
ColeFang/MOCO_ERNIE
| 3,712 |
137531
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Propeller"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import sys
import logging
import six
from time import time
__version__ = '0.2'
log = logging.getLogger(__name__)
stream_hdl = logging.StreamHandler(stream=sys.stderr)
formatter = logging.Formatter(
fmt='[%(levelname)s] %(asctime)s [%(filename)12s:%(lineno)5d]:\t%(message)s'
)
try:
from colorlog import ColoredFormatter
fancy_formatter = ColoredFormatter(
fmt='%(log_color)s[%(levelname)s] %(asctime)s [%(filename)12s:%(lineno)5d]:\t%(message)s'
)
stream_hdl.setFormatter(fancy_formatter)
except ImportError:
stream_hdl.setFormatter(formatter)
log.setLevel(logging.INFO)
log.addHandler(stream_hdl)
log.propagate = False
from propeller.types import *
from propeller.util import ArgumentParser, parse_hparam, parse_runconfig, parse_file
|
habu/cli/cmd_crack_snmp.py
|
optimustoaster/habu
| 461 |
137532
|
#!/usr/bin/env python3
import logging
import os
import sys
from pathlib import Path
import click
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import ASN1_OID, IP, SNMP, UDP, SNMPget, SNMPvarbind, conf, sr1
@click.command()
@click.argument('ip')
@click.option('-p', 'port', default=161, help='Port to use')
@click.option('-c', 'community', default=None, help='Community (default: list of most used)')
@click.option('-s', 'stop', is_flag=True, default=False, help='Stop after first match')
@click.option('-v', 'verbose', is_flag=True, default=False, help='Verbose')
def cmd_crack_snmp(ip, community, port, stop, verbose):
"""Launches snmp-get queries against an IP, and tells you when
finds a valid community string (is a simple SNMP cracker).
The dictionary used is the distributed with the onesixtyone tool
https://github.com/trailofbits/onesixtyone
Example:
\b
# habu.crack.snmp 172.16.58.3
Community found: private
Community found: public
Note: You can also receive messages like \<UNIVERSAL\> \<class
'scapy.asn1.asn1.ASN1\_Class\_metaclass'\>, I don't know how to suppress
them for now.
"""
FILEDIR = os.path.dirname(os.path.abspath(__file__))
DATADIR = os.path.abspath(os.path.join(FILEDIR, '../data'))
COMMFILE = Path(os.path.abspath(os.path.join(DATADIR, 'dict_snmp.txt')))
if community:
communities = [community]
else:
with COMMFILE.open() as cf:
communities = cf.read().split('\n')
conf.verb = False
for pkt in IP(dst=ip)/UDP(sport=port, dport=port)/SNMP(community="public", PDU=SNMPget(varbindlist=[SNMPvarbind(oid=ASN1_OID("1.3.6.1"))])):
if verbose:
print(pkt[IP].dst)
for community in communities:
if verbose:
print('.', end='')
sys.stdout.flush()
pkt[SNMP].community=community
ans = sr1(pkt, timeout=0.5, verbose=0)
if ans and UDP in ans:
print('\n{} - Community found: {}'.format(pkt[IP].dst, community))
if stop:
break
return True
if __name__ == '__main__':
cmd_crack_snmp()
|
RecoJets/JetAnalyzers/test/JetIdExample.py
|
ckamtsikis/cmssw
| 852 |
137533
|
# PYTHON configuration file for class: JetIdExample
# Description: Example showing how to conveniently select a collection
# of jets in the event which pass the jet id criteria ("loose",
# "medium", "tight", etc.) using a simple one-line plugin.
#
# Author: <NAME>
# Date: 18 - January - 2011
import FWCore.ParameterSet.Config as cms
## ____ _ __ __ ____
## | _ \ __ _| |_ __ _ ___ _ __ | \/ |/ ___|
## | | | |/ _` | __/ _` | / _ \| '__| | |\/| | |
## | |_| | (_| | || (_| | | (_) | | | | | | |___
## |____/ \__,_|\__\__,_| \___/|_| |_| |_|\____|
isMC = True
## ____ __ _ _ _
## / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |__ | | ___ ___
## | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | '_ \| |/ _ \/ __|
## | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_) | | __/\__ \
## \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|_.__/|_|\___||___/
## |___/
NJetsToKeep = 2
GLOBAL_TAG = 'GR_R_38X_V15::All'
inputFile = 'file:/uscms_data/d2/kalanand/dijet-Run2010A-JetMET-Nov4ReReco-9667events.root'
if isMC:
GLOBAL_TAG = 'START38_V14::All'
inputFile ='/store/mc/Fall10/QCD_Pt_80to120_TuneZ2_7TeV_pythia6/GEN-SIM-RECO/START38_V12-v1/0000/FEF4D100-4CCB-DF11-94CB-00E08178C12F.root'
## _ _ _
## (_)_ __ ___| |_ _ __| | ___ ___
## | | '_ \ / __| | | | |/ _` |/ _ \/ __|
## | | | | | (__| | |_| | (_| | __/\__ \
## |_|_| |_|\___|_|\__,_|\__,_|\___||___/
process = cms.Process("Ana")
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load("Configuration.StandardSequences.Geometry_cff")
process.GlobalTag.globaltag = GLOBAL_TAG
############# Format MessageLogger #################
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.cerr.FwkReport.reportEvery = 10
## ____ _ ____
## | _ \ ___ ___ | / ___| ___ _ _ _ __ ___ ___
## | |_) / _ \ / _ \| \___ \ / _ \| | | | '__/ __/ _ \
## | __/ (_) | (_) | |___) | (_) | |_| | | | (_| __/
## |_| \___/ \___/|_|____/ \___/ \__,_|_| \___\___|
############# Set the number of events #############
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(100)
)
############# Define the source file ###############
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(inputFile)
)
process.source.inputCommands = cms.untracked.vstring("keep *","drop *_MEtoEDMConverter_*_*")
## _ _ ___ _
## | | ___| |_ |_ _|__| |
## _ | |/ _ \ __| | |/ _` |
## | |_| | __/ |_ | | (_| |
## \___/ \___|\__| |___\__,_|
############# JetID: Calo Jets ###########################
process.load("RecoJets.JetProducers.ak5JetID_cfi")
process.CaloJetsLooseId = cms.EDProducer("CaloJetIdSelector",
src = cms.InputTag( "ak5CaloJets" ),
idLevel = cms.string("LOOSE"),
jetIDMap = cms.untracked.InputTag("ak5JetID")
)
process.CaloJetsTightId = cms.EDProducer("CaloJetIdSelector",
src = cms.InputTag( "ak5CaloJets" ),
idLevel = cms.string("TIGHT"),
jetIDMap = cms.untracked.InputTag("ak5JetID")
)
############# JetID: PF Jets ###########################
process.PFJetsLooseId = cms.EDProducer("PFJetIdSelector",
src = cms.InputTag( "ak5PFJets" ),
idLevel = cms.string("LOOSE")
)
process.PFJetsTightId = cms.EDProducer("PFJetIdSelector",
src = cms.InputTag( "ak5PFJets" ),
idLevel = cms.string("TIGHT")
)
## ____ _ _
## | _ \| | ___ | |_ ___
## | |_) | |/ _ \| __/ __|
## | __/| | (_) | |_\__ \
## |_| |_|\___/ \__|___/
#######################################################
############# Analysis: Calo Jets ##################
process.caloJetAnalysisLooseId = cms.EDAnalyzer("CaloJetPlotsExample",
JetAlgorithm = cms.string('CaloJetsLooseId'),
HistoFileName = cms.string('CaloJetPlotsExample_LooseId.root'),
NJets = cms.int32(NJetsToKeep)
)
process.caloJetAnalysisTightId = process.caloJetAnalysisLooseId.clone()
process.caloJetAnalysisTightId.JetAlgorithm = cms.string('CaloJetsTightId')
process.caloJetAnalysisTightId.HistoFileName = cms.string('CaloJetPlotsExample_TightId.root')
############# Analysis: PF Jets ###################
process.pfJetAnalysisLooseId = cms.EDAnalyzer("PFJetPlotsExample",
JetAlgorithm = cms.string('PFJetsLooseId'),
HistoFileName = cms.string('PFJetPlotsExample_LooseId.root'),
NJets = cms.int32(NJetsToKeep)
)
process.pfJetAnalysisTightId = process.pfJetAnalysisLooseId.clone()
process.pfJetAnalysisTightId.JetAlgorithm = cms.string('PFJetsTightId')
process.pfJetAnalysisTightId.HistoFileName = cms.string('PFJetPlotsExample_TightId.root')
## ____ _ _
## | _ \ __ _| |_| |__
## | |_) / _` | __| '_ \
## | __/ (_| | |_| | | |
## |_| \__,_|\__|_| |_|
## ############# Path ###########################
process.p = cms.Path( process.ak5JetID +
process.CaloJetsLooseId +
process.CaloJetsTightId +
process.PFJetsLooseId +
process.PFJetsTightId +
process.caloJetAnalysisLooseId +
process.caloJetAnalysisTightId +
process.pfJetAnalysisLooseId +
process.pfJetAnalysisTightId
)
|
cctbx/xray/targets/tst_shelxl_wght_ls.py
|
dperl-sol/cctbx_project
| 155 |
137539
|
from __future__ import absolute_import, division, print_function
import cctbx.xray.targets
from cctbx.array_family import flex
from libtbx.test_utils import approx_equal
from six.moves import range
def calc_k(f_obs, i_calc):
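# Least-squares scale factor between observed amplitudes and calculated
# ones (Fc = sqrt(Ic)): k = sum(Fo*Fc) / sum(Fc*Fc).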
fc = flex.sqrt(i_calc)
num = flex.sum(f_obs * fc)
den = flex.sum(fc * fc)
assert den != 0
k = num / den
return k
def calc_w(wa, wb, i_obs, i_sig, i_calc, k):
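# SHELXL-style weights: w = 1 / (sigma_k^2 + (wa*P)^2 + wb*P), where the
# observed intensities and sigmas are rescaled by k^2, negative Io are
# clipped to zero, and P = (Io_k + 2*Ic) / 3.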
assert i_sig.size() == i_obs.size()
assert i_calc.size() == i_obs.size()
ik = i_obs / k**2
sk = i_sig / k**2
ik.set_selected(ik < 0, 0)
p = (ik + 2 * i_calc) / 3
den = flex.pow2(sk) + flex.pow2(wa*p) + wb*p
assert den.all_gt(1e-8)
weights = 1 / den
return weights
def calc_t(i_obs, i_calc, k, weights):
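# Normalized weighted least-squares target:
# t = sum(w * (Io - k^2*Ic)^2) / sum(w * Io^2).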
delta = i_obs - k**2 * i_calc
t_num = flex.sum(weights * flex.pow2(delta))
t_den = flex.sum(weights * flex.pow2(i_obs))
assert t_den != 0
return t_num / t_den
def kwt(f_obs, i_obs, i_sig, f_calc, i_calc, wa, wb):
if (f_calc is not None):
i_calc = flex.norm(f_calc)
k = calc_k(f_obs, i_calc)
weights = calc_w(
wa=wa,
wb=wb,
i_obs=i_obs,
i_sig=i_sig,
i_calc=i_calc,
k=k)
t = calc_t(
i_obs=i_obs,
i_calc=i_calc,
k=k,
weights=weights)
return k, weights, t
def kwt2(f_obs, i_obs, i_sig, f_calc, i_calc, wa, wb):
k, weights, t = kwt(f_obs, i_obs, i_sig, f_calc, i_calc, wa, wb)
trg = cctbx.xray.targets.shelxl_wght_ls(
f_obs=f_obs,
i_obs=i_obs,
i_sig=i_sig,
f_calc=f_calc,
i_calc=i_calc,
wa=wa,
wb=wb)
assert approx_equal(trg.scale_factor, k)
assert approx_equal(trg.weights, weights)
assert approx_equal(trg.target, t)
return trg
def exercise(mt, n_refl):
f_obs = mt.random_double(size=n_refl)
i_obs = flex.pow2(f_obs)
i_sig = mt.random_double(size=i_obs.size())
f_calc = flex.complex_double(
mt.random_double(size=f_obs.size()),
mt.random_double(size=f_obs.size()))
i_calc = flex.norm(f_calc)
wa = 1.23
wb = 2.34
trg = kwt2(
f_obs=f_obs, i_obs=i_obs, i_sig=i_sig,
f_calc=f_calc, i_calc=None, wa=wa, wb=wb)
def check_i_derivs():
g_ana = trg.i_gradients
c_ana = trg.i_curvatures
eps = 1e-6
g_fin = flex.double()
c_fin = flex.double()
for ih in range(i_calc.size()):
fs = []
gs = []
c_orig = i_calc[ih]
for signed_eps in [eps, -eps]:
i_calc[ih] = c_orig + signed_eps
trg_eps = kwt2(
f_obs=f_obs, i_obs=i_obs, i_sig=i_sig,
f_calc=None, i_calc=i_calc, wa=wa, wb=wb)
fs.append(trg_eps.target)
gs.append(trg_eps.i_gradients[ih])
g_fin.append((fs[0]-fs[1])/(2*eps))
c_fin.append((gs[0]-gs[1])/(2*eps))
i_calc[ih] = c_orig
assert approx_equal(g_ana, g_fin)
assert approx_equal(c_ana, c_fin)
def check_f_derivs():
g_ana = trg.f_gradients
c_ana = trg.f_hessians
eps = 1e-6
g_fin = flex.complex_double()
c_fin = flex.vec3_double()
for ih in range(i_calc.size()):
c_orig = f_calc[ih]
g_fin_ab = []
c_fin_ab = []
for iab in [0,1]:
fs = []
gs = []
for signed_eps in [eps, -eps]:
if (iab == 0):
f_calc[ih] = complex(c_orig.real + signed_eps, c_orig.imag)
else:
f_calc[ih] = complex(c_orig.real, c_orig.imag + signed_eps)
trg_eps = kwt2(
f_obs=f_obs, i_obs=i_obs, i_sig=i_sig,
f_calc=f_calc, i_calc=None, wa=wa, wb=wb)
fs.append(trg_eps.target)
gs.append(trg_eps.f_gradients[ih])
g_fin_ab.append((fs[0]-fs[1])/(2*eps))
c_fin_ab.append((gs[0]-gs[1])/(2*eps))
g_fin.append(complex(*g_fin_ab))
assert approx_equal(c_fin_ab[0].imag, c_fin_ab[1].real)
c_fin.append((c_fin_ab[0].real, c_fin_ab[1].imag, c_fin_ab[0].imag))
f_calc[ih] = c_orig
assert approx_equal(g_ana, g_fin)
assert approx_equal(c_ana, c_fin)
check_i_derivs()
check_f_derivs()
def run(args):
assert len(args) < 3
arg_vals = [int(arg) for arg in args]
arg_vals = arg_vals + [3, 2][len(arg_vals):]
n_refl, n_trials = arg_vals
assert n_refl > 0
assert n_trials > 0
mt = flex.mersenne_twister(seed=0)
for i_trial in range(n_trials):
exercise(mt, n_refl)
print("OK")
if (__name__ == "__main__"):
import sys
run(args=sys.argv[1:])
|
xdoctest/_old_pkgutil_code.py
|
MatsLanGoH/xdoctest
| 142 |
137544
|
<filename>xdoctest/_old_pkgutil_code.py
def _pkgutil_submodule_names(modpath, with_pkg=False, with_mod=True):
"""
Ignore:
x = sorted(submodule_paths(modname_to_modpath('ubelt')))
y = sorted(_pkgutil_submodule_names(modname_to_modpath('ubelt')))
x = [modpath_to_modname(p, hide_init=False, hide_main=False) for p in x]
print('x = {!r}'.format(x))
print('y = {!r}'.format(y))
Notes:
pkgutil takes .pyc files into account; we choose not to.
"""
package_name = modpath_to_modname(modpath)
if isfile(modpath):
# If input is a file, just return it
yield package_name
else:
# Otherwise, if it is a package, find sub-packages and sub-modules
import pkgutil
# don't use the pkgutil version, as it is incompatible with pytest
prefix = package_name + '.'
walker = pkgutil.walk_packages([modpath], prefix=prefix,
onerror=lambda x: None) # nocover
for importer, modname, ispkg in walker:
if not ispkg and with_mod:
yield modname
elif ispkg and with_pkg:
yield modname
|
PhysicsTools/IsolationAlgos/python/highPtTrackIsolations_cff.py
|
ckamtsikis/cmssw
| 852 |
137553
|
import FWCore.ParameterSet.Config as cms
from PhysicsTools.IsolationAlgos.tkIsoDeposits_cff import *
EcalIsolationForTracks = cms.EDProducer("IsolationProducerForTracks",
highPtTracks = cms.InputTag("highPtTracks"),
tracks = cms.InputTag("goodTracks"),
isoDeps = cms.InputTag("tkIsoDepositCalByAssociatorTowers","ecal"),
coneSize = cms.double(0.3),
trackPtMin = cms.double(20.0)
)
HcalIsolationForTracks = cms.EDProducer("IsolationProducerForTracks",
highPtTracks = cms.InputTag("highPtTracks"),
tracks = cms.InputTag("goodTracks"),
isoDeps = cms.InputTag("tkIsoDepositCalByAssociatorTowers","hcal"),
coneSize = cms.double(0.3),
trackPtMin = cms.double(20.0)
)
highPtTrackIsolations = cms.Sequence(tkIsoDeposits+EcalIsolationForTracks+HcalIsolationForTracks)
|
apps/dash-deck-explorer/demos/usage-heatmap-layer.py
|
JeroenvdSande/dash-sample-apps
| 2,332 |
137643
|
<filename>apps/dash-deck-explorer/demos/usage-heatmap-layer.py
"""
Adapted from: https://pydeck.gl/gallery/heatmap_layer.html
Location of livestock raised in New Mexico in the United States in 2006,
via the United Nations and FAOSTAT, with the source data viewable here:
http://www.fao.org/faostat/en/
Locations for cattle are shown in blue and poultry are in orange.
Overlaid with satellite imagery from Mapbox to highlight how
terrain affects agriculture.
"""
import os
import dash
import dash_deck
import dash_html_components as html
import pydeck as pdk
import pandas as pd
mapbox_api_token = os.getenv("MAPBOX_ACCESS_TOKEN")
CATTLE_DATA = (
"https://raw.githubusercontent.com/ajduberstein/geo_datasets/master/nm_cattle.csv"
)
POULTRY_DATA = (
"https://raw.githubusercontent.com/ajduberstein/geo_datasets/master/nm_chickens.csv"
)
HEADER = ["lng", "lat", "weight"]
cattle_df = pd.read_csv(CATTLE_DATA, header=None).sample(frac=0.5)
poultry_df = pd.read_csv(POULTRY_DATA, header=None).sample(frac=0.5)
cattle_df.columns = HEADER
poultry_df.columns = HEADER
COLOR_BREWER_BLUE_SCALE = [
[240, 249, 232],
[204, 235, 197],
[168, 221, 181],
[123, 204, 196],
[67, 162, 202],
[8, 104, 172],
]
view = pdk.data_utils.compute_view(cattle_df[["lng", "lat"]])
view.zoom = 6
cattle = pdk.Layer(
"HeatmapLayer",
data=cattle_df,
opacity=0.9,
get_position=["lng", "lat"],
aggregation='"MEAN"',
color_range=COLOR_BREWER_BLUE_SCALE,
threshold=1,
get_weight="weight",
pickable=True,
)
poultry = pdk.Layer(
"HeatmapLayer",
data=poultry_df,
opacity=0.9,
get_position=["lng", "lat"],
threshold=0.75,
aggregation='"MEAN"',
get_weight="weight",
pickable=True,
)
r = pdk.Deck(layers=[cattle, poultry], initial_view_state=view,)
app = dash.Dash(__name__)
app.layout = html.Div(
dash_deck.DeckGL(
r.to_json(),
id="deck-gl",
tooltip={
"text": "Concentration of cattle in blue, concentration of poultry in orange"
},
mapboxKey=mapbox_api_token,
)
)
if __name__ == "__main__":
app.run_server(debug=True)
|
zentral/core/secret_engines/backends/base.py
|
janheise/zentral
| 634 |
137649
|
<reponame>janheise/zentral<filename>zentral/core/secret_engines/backends/base.py
class BaseSecretEngine:
def __init__(self, config_d):
self.name = config_d['secret_engine_name']
self.default = config_d.get("default", False)
def encrypt(self, data, **context):
raise NotImplementedError
def decrypt(self, data, **context):
raise NotImplementedError
|
contrib/nn/layers.py
|
cjgalvin/deepchem
| 3,782 |
137650
|
<gh_stars>1000+
"""Custom Keras Layers.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = "<NAME> and <NAME>"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "MIT"
import warnings
import numpy as np
import tensorflow as tf
from deepchem.nn import activations
from deepchem.nn import initializations
from deepchem.nn import model_ops
def affine(x, W, b):
return tf.matmul(x, W) + b
def tf_affine(x, vm, scope):
W = vm.var(scope, 'W')
b = vm.var(scope, 'b')
return tf.matmul(x, W) + b
def cos(x, y):
denom = (
model_ops.sqrt(model_ops.sum(tf.square(x)) * model_ops.sum(tf.square(y)))
+ model_ops.epsilon())
return model_ops.dot(x, tf.transpose(y)) / denom
|
orchestra/workflow/directory.py
|
code-review-doctor/orchestra
| 444 |
137660
|
<reponame>code-review-doctor/orchestra
import json
import os
from orchestra.core.errors import WorkflowError
def parse_workflow_directory(workflow_directory):
parsed = {
'versions': [],
}
# Verify that the directory exists.
if not os.path.exists(workflow_directory):
raise WorkflowError('Workflow directory does not exist.')
# Look for and parse the workflow manifest.
workflow_files = os.listdir(workflow_directory)
if 'workflow.json' not in workflow_files:
raise WorkflowError('No "workflow.json" manifest file found.')
with open(os.path.join(workflow_directory, 'workflow.json'), 'r') as f:
parsed['workflow'] = json.load(f)
# Look for and parse workflow version subdirectories.
workflow_subdirs = [
os.path.join(workflow_directory, workflow_file)
for workflow_file in workflow_files
if os.path.isdir(os.path.join(workflow_directory, workflow_file))]
for version_directory in workflow_subdirs:
version_files = os.listdir(version_directory)
if 'version.json' not in version_files:
continue # Subdirectory wasn't a workflow version.
with open(os.path.join(version_directory, 'version.json'), 'r') as f:
parsed['versions'].append(json.load(f))
# Complain if the workflow has no versions.
if len(parsed['versions']) == 0:
raise WorkflowError('Workflow directory {} does not contain any '
'versions'.format(workflow_directory))
return parsed
|
saleor/product/migrations/0093_auto_20190521_0124.py
|
elwoodxblues/saleor
| 15,337 |
137668
|
<reponame>elwoodxblues/saleor
# Generated by Django 2.2.1 on 2019-05-21 06:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("product", "0092_auto_20190507_0309")]
operations = [
migrations.AddField(
model_name="collectionproduct",
name="sort_order",
field=models.PositiveIntegerField(db_index=True, editable=False, null=True),
),
migrations.AlterField(
model_name="collectionproduct",
name="product",
field=models.ForeignKey(
on_delete=models.deletion.CASCADE,
related_name="collectionproduct",
to="product.Product",
),
),
migrations.AlterField(
model_name="collectionproduct",
name="collection",
field=models.ForeignKey(
on_delete=models.deletion.CASCADE,
related_name="collectionproduct",
to="product.Collection",
),
),
migrations.AlterModelTable(name="collectionproduct", table=None),
migrations.AlterField(
model_name="attributevalue",
name="sort_order",
field=models.PositiveIntegerField(db_index=True, editable=False, null=True),
),
migrations.AlterField(
model_name="productimage",
name="sort_order",
field=models.PositiveIntegerField(db_index=True, editable=False, null=True),
),
]
|
aognet/loader/cifar100.py
|
xilaili/AOGNet
| 148 |
137673
|
import os
import sys
import mxnet as mx
def cifar100_iterator(cfg, kv):
train_rec = os.path.join(cfg.dataset.data_dir, "cifar100_train.rec")
val_rec = os.path.join(cfg.dataset.data_dir, "cifar100_test.rec")
mean = [129.31, 124.11, 112.4]
std = [68.21, 65.41, 70.41]
train = mx.io.ImageRecordIter(
path_imgrec = train_rec,
label_width = 1,
data_name = 'data',
label_name = 'softmax_label',
data_shape = (3, 32, 32),
batch_size = cfg.batch_size,
pad = 4,
fill_value = 127,
#mean_r = mean[0],
#mean_g = mean[1],
#mean_b = mean[2],
#std_r = std[0],
#std_g = std[1],
#std_b = std[2],
rand_crop = True if cfg.dataset.aug_level > 0 else False,
rand_mirror = True if cfg.dataset.aug_level > 0 else False,
shuffle = True if cfg.dataset.aug_level >= 0 else False,
num_parts = kv.num_workers,
part_index = kv.rank)
val = mx.io.ImageRecordIter(
path_imgrec = val_rec,
label_width = 1,
data_name = 'data',
label_name = 'softmax_label',
batch_size = cfg.batch_size,
data_shape = (3, 32, 32),
mean_r = mean[0],
#mean_g = mean[1],
#mean_b = mean[2],
#std_r = std[0],
#std_g = std[1],
#std_b = std[2],
rand_crop = False,
rand_mirror = False,
num_parts = kv.num_workers,
part_index = kv.rank)
return train, val
|
service_catalog/views/operation_list_view.py
|
LaudateCorpus1/squest
| 112 |
137733
|
from django.urls import reverse
from django_filters.views import FilterView
from django_tables2.views import SingleTableMixin
from guardian.mixins import LoginRequiredMixin
from service_catalog.filters.operation_filter import OperationFilter
from service_catalog.models import Operation, Service
from service_catalog.tables.operation_tables import OperationTable
class OperationListView(LoginRequiredMixin, SingleTableMixin, FilterView):
table_pagination = {'per_page': 10}
table_class = OperationTable
model = Operation
template_name = 'generics/list.html'
filterset_class = OperationFilter
def get_table_data(self, **kwargs):
filtered = super().get_table_data().distinct()
return Operation.objects.filter(service__id=self.kwargs.get('service_id')).distinct() & filtered
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
service_id = self.kwargs.get('service_id')
context['service_id'] = service_id
context['html_button_path'] = "generics/buttons/add_operation.html"
context['breadcrumbs'] = [
{'text': 'Service catalog', 'url': reverse('service_catalog:service_list')},
{'text': 'Manage services', 'url': reverse('service_catalog:manage_services')},
{'text': Service.objects.get(id=service_id).name, 'url': ""},
{'text': 'Operations', 'url': ""},
]
return context
|
mmcv/runner/hooks/sampler_seed.py
|
XinYangDong/mmcv-0.2.10
| 384 |
137741
|
<reponame>XinYangDong/mmcv-0.2.10<filename>mmcv/runner/hooks/sampler_seed.py
from .hook import Hook
class DistSamplerSeedHook(Hook):
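# Calling set_epoch() on the DistributedSampler before each epoch makes the
# per-epoch shuffle deterministic, different from epoch to epoch, and
# consistent across worker processes.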
def before_epoch(self, runner):
runner.data_loader.sampler.set_epoch(runner.epoch)
|
bulma/apps.py
|
makiolo/django-bulma
| 330 |
137759
|
from django.apps import AppConfig
class BulmaConfig(AppConfig):
name = 'bulma'
|
scripts/examples/OpenMV/02-Board-Control/adc_read_int_channel.py
|
jiskra/openmv
| 1,761 |
137763
|
<reponame>jiskra/openmv
# ADC Internal Channels Example
#
# This example shows how to read internal ADC channels.
import time, pyb
adc = pyb.ADCAll(12)
print("VREF = %.1fv VREF = %.1fv Temp = %d" % (adc.read_core_vref(), adc.read_core_vbat(), adc.read_core_temp()))
|
fastai_do_not_use/callbacks/general_sched.py
|
anhquan0412/fastai_v1
| 115 |
137766
|
<gh_stars>100-1000
from ..core import *
from ..callback import *
from ..basic_train import Learner
__all__ = ['GeneralScheduler', 'TrainingPhase']
@dataclass
class TrainingPhase():
"Schedule lr,mom according to `lr_anneal` and `mom_anneal` across a `length` schedule"
length:int
lrs:Floats
moms:Floats
lr_anneal:AnnealFunc=None
mom_anneal:AnnealFunc=None
def __post_init__(self)->None:
self.lr_step = Stepper(self.lrs, self.length, self.lr_anneal)
self.mom_step = Stepper(self.moms, self.length, self.mom_anneal)
@dataclass
class GeneralScheduler(Callback):
"Schedule multiple `TrainingPhase` for a `Learner`."
learn:Learner
phases:Collection[TrainingPhase]
def on_train_begin(self, n_epochs:int, **kwargs:Any)->None:
"Initialize our lr and mom schedules for training"
self.lr_scheds = [p.lr_step for p in self.phases]
self.mom_scheds = [p.mom_step for p in self.phases]
self.opt = self.learn.opt
self.opt.lr,self.opt.mom = self.lr_scheds[0].start,self.mom_scheds[0].start
self.idx_s = 0
def on_batch_end(self, **kwargs:Any)->None:
"Take a step in lr,mom sched, start next sched when current is complete"
if self.idx_s >= len(self.lr_scheds): return True
self.opt.lr = self.lr_scheds[self.idx_s].step()
self.opt.mom = self.mom_scheds[self.idx_s].step()
if self.lr_scheds[self.idx_s].is_done:
self.idx_s += 1
|
tools/type_whisperer/file_descriptor_set_text_gen.py
|
lopter-dbx/envoy
| 218 |
137796
|
<gh_stars>100-1000
# Generate a text proto from a given list of FileDescriptorSets.
# TODO(htuch): switch to base64 encoded binary output in the future,
# this will avoid needing to deal with option preserving imports below.
import sys
from google.protobuf import descriptor_pb2
# Needed to avoid annotation option stripping during pb_text generation.
from udpa.annotations import migrate_pb2
def Decode(path):
with open(path, 'rb') as f:
file_set = descriptor_pb2.FileDescriptorSet()
file_set.ParseFromString(f.read())
return str(file_set)
if __name__ == '__main__':
output_path = sys.argv[1]
input_paths = sys.argv[2:]
pb_text = '\n'.join(Decode(path) for path in input_paths)
with open(output_path, 'w') as f:
f.write(pb_text)
|
frappe-bench/env/lib/python2.7/site-packages/faker/providers/address/nl_NL/__init__.py
|
ibrahmm22/library-management
| 412 |
137797
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as AddressProvider
class Provider(AddressProvider):
building_number_formats = ('#', '##', '###', '#', '##', '###')
street_suffixes = (
'baan', 'boulevard', 'dreef', 'hof', 'laan', 'pad',
'ring', 'singel', 'steeg', 'straat', 'weg',
)
# the 4 digit numerical part of Dutch postcodes is between 1000 and 9999;
# see http://nl.wikipedia.org/wiki/Postcode#Postcodes_in_Nederland
postcode_formats = ('%###??', '%### ??')
city_formats = ('{{city}}',)
# countries are from http://nl.wikipedia.org/wiki/ISO_3166-1
countries = (
'Afghanistan', 'Albanië', 'Algerije', 'Amerikaans-Samoa',
'Amerikaanse Maagdeneilanden', 'Andorra', 'Angola', 'Anguilla',
'Antarctica', 'Antigua en Barbuda', 'Argentinië', 'Armenië', 'Aruba',
'Australië', 'Azerbeidzjan', "Bahama's", 'Bahrein', 'Bangladesh',
'Barbados', 'België', 'Belize', 'Benin', 'Bermuda', 'Bhutan',
'Bolivia', 'Bonaire, Sint Eustatius en Saba', 'Bosnië en Herzegovina',
'Botswana', 'Bouveteiland', 'Brazilië',
'Brits Indische Oceaanterritorium', 'Britse Maagdeneilanden', 'Brunei',
'Bulgarije', 'Burkina Faso', 'Burundi', 'Cambodja', 'Canada',
'Centraal-Afrikaanse Republiek', 'Chili', 'China', 'Christmaseiland',
'Cocoseilanden', 'Colombia', 'Comoren', 'Congo-Brazzaville',
'Congo-Kinshasa', 'Cookeilanden', '<NAME>', 'Cuba', 'Curaçao',
'Cyprus', 'Denemarken', 'Djibouti', 'Dominica',
'Dominicaanse Republiek', 'Duitsland', 'Ecuador', 'Egypte',
'El Salvador', 'Equatoriaal-Guinea', 'Eritrea', 'Estland', 'Ethiopië',
'Faeröer', 'Falklandeilanden', 'Fiji', 'Filipijnen', 'Finland',
'Frankrijk', 'Frans-Guyana', 'Frans-Polynesië',
'Franse Zuidelijke en Antarctische Gebieden', 'Gabon', 'Gambia',
'Georgië', 'Ghana', 'Gibraltar', 'Grenada', 'Griekenland', 'Groenland',
'Guadeloupe', 'Guam', 'Guatemala', 'Guernsey', 'Guinee',
'Guinee-Bissau', 'Guyana', 'Haïti', 'Heard en McDonaldeilanden',
'Honduras', 'Hongarije', 'Hongkong', 'IJsland', 'Ierland', 'India',
'Indonesië', 'Irak', 'Iran', 'Israël', 'Italië', 'Ivoorkust',
'Jamaica', 'Japan', 'Jemen', 'Jersey', 'Jordanië', 'Kaaimaneilanden',
'Kaapverdië', 'Kameroen', 'Kazachstan', 'Kenia', 'Kirgizië',
'Kiribati', 'Kleine Pacifische eilanden van de Verenigde Staten',
'Koeweit', 'Kroatië', 'Laos', 'Lesotho', 'Letland', 'Libanon',
'Liberia', 'Libië', 'Liechtenstein', 'Litouwen', 'Luxemburg', 'Macau',
'Macedonië', 'Madagaskar', 'Malawi', 'Maldiven', 'Maleisië', 'Mali',
'Malta', 'Man', 'Marokko', 'Marshalleilanden', 'Martinique',
'Mauritanië', 'Mauritius', 'Mayotte', 'Mexico', 'Micronesia',
'Moldavië', 'Monaco', 'Mongolië', 'Montenegro', 'Montserrat',
'Mozambique', 'Myanmar', 'Namibië', 'Nauru', 'Nederland', 'Nepal',
'Nicaragua', 'Nieuw-Caledonië', 'Nieuw-Zeeland', 'Niger', 'Nigeria',
'Niue', 'Noord-Korea', 'Noordelijke Marianen', 'Noorwegen', 'Norfolk',
'Oeganda', 'Oekraïne', 'Oezbekistan', 'Oman', 'Oost-Timor',
'Oostenrijk', 'Pakistan', 'Palau', 'Palestina', 'Panama',
'Papoea-Nieuw-Guinea', 'Paraguay', 'Peru', 'Pitcairneilanden', 'Polen',
'Portugal', 'Puerto Rico', 'Qatar', 'Roemenië', 'Rusland', 'Rwanda',
'Réunion', 'Saint Kitts en Nevis', 'Saint Lucia',
'Saint Vincent en de Grenadines', 'Saint-Barthélemy',
'Saint-Pierre en Miquelon', 'Salomonseilanden', 'Samoa', 'San Marino',
'Sao Tomé en Principe', 'Saoedi-Arabië', 'Senegal', 'Servië',
'Seychellen', 'Sierra Leone', 'Singapore', 'Sint Maarten',
'Sint-Helena, Ascension en Tristan da Cunha', 'Sint-Maarten',
'Slovenië', 'Slowakije', 'Soedan', 'Somalië', 'Spanje',
'Spitsbergen en Jan Mayen', 'Sri Lanka', 'Suriname', 'Swaziland',
'Syrië', 'Tadzjikistan', 'Taiwan', 'Tanzania', 'Thailand', 'Togo',
'Tokelau', 'Tonga', 'Trinidad en Tobago', 'Tsjaad', 'Tsjechië',
'Tunesië', 'Turkije', 'Turkmenistan', 'Turks- en Caicoseilanden',
'Tuvalu', 'Uruguay', 'Vanuatu', 'Vaticaanstad', 'Venezuela',
'Verenigd Koninkrijk', 'Verenigde Arabische Emiraten',
'Verenigde Staten', 'Vietnam', 'Wallis en Futuna', 'Westelijke Sahara',
'Wit-Rusland', 'Zambia', 'Zimbabwe', 'Zuid-Afrika',
'Zuid-Georgia en de Zuidelijke Sandwicheilanden', 'Zuid-Korea',
'Zuid-Soedan', 'Zweden', 'Zwitserland', 'Åland',
)
# cities are taken from the BAG "woonplaats";
# in this case the 8-Mar-2014 extract;
# see http://data.nlextract.nl/bag/csv/
cities = (
"'s Gravenmoer", "'s-Graveland", "'s-Gravendeel", "'s-Gravenhage",
"'s-Gravenpolder", "'s-Gravenzande", "'s-Heer Abtskerke",
"'s-Heer Arendskerke", "'s-Heer Hendrikskinderen", "'s-Heerenberg",
"'s-Heerenbroek", "'s-Heerenhoek", "'s-Hertogenbosch", "'t Goy",
"'t Haantje", "'t Harde", "'t Loo Oldebroek", "'t Veld", "'<NAME>",
"'<NAME>", "'<NAME>", '1e Exloërmond', '2e Exloërmond',
'2e Valthermond', 'Aadorp', 'Aagtekerke', 'Aalden', 'Aalsmeer',
'Aalsmeerderbrug', 'Aalst', 'Aalsum', 'Aalten', 'Aardenburg',
'Aarlanderveen', 'Aarle-Rixtel', 'Aartswoud', 'Abbega', 'Abbekerk',
'Abbenbroek', 'Abbenes', 'Abcoude', 'Achlum', 'Achterveld',
'Achthuizen', 'Achtmaal', 'Acquoy', 'Adorp', 'Aduard', 'Aerdenhout',
'Aerdt', 'Afferden', '<NAME>', 'Agelo', 'Akersloot', 'Akkrum',
'Akmarijp', 'Albergen', 'Alblasserdam', '<NAME>', 'Aldeboarn',
'Aldtsjerk', 'Alem', 'Alkmaar', 'Allingawier', 'Almelo', 'Almen',
'Almere', 'Almkerk', 'Alphen', '<NAME>', 'Alteveer',
'Alteveer gem Hoogeveen', 'Altforst', '<NAME>', 'Ameide', 'Amen',
'America', 'Amerongen', 'Amersfoort', 'Ammerstol', 'Ammerzoden',
'Amstelhoek', 'Amstelveen', 'Amstenrade', 'Amsterdam',
'Amsterdam-Duivendrecht', 'Andel', 'Andelst', 'Anderen', 'Andijk',
'Ane', 'Anerveen', 'Anevelde', 'Angeren', 'Angerlo', 'Anjum',
'Ankeveen', 'Anloo', '<NAME>', 'Annen', 'Annerveenschekanaal',
'Ansen', 'Apeldoorn', 'Appelscha', 'Appeltern', 'Appingedam', 'Arcen',
'Arkel', 'Arnemuiden', 'Arnhem', 'Arriën', 'Arum', 'Asch', 'Asperen',
'Assen', 'Assendelft', 'Asten', 'Augsbuurt', 'Augustinusga',
'Austerlitz', 'Avenhorn', 'Axel', 'Azewijn', 'Baaiduinen', 'Baaium',
'Baak', 'Baambrugge', 'Baard', 'Baarland', 'Baarle-Nassau', 'Baarlo',
'Baarn', 'Baars', 'Babberich', 'Babyloniënbroek', '<NAME>',
'Badhoevedorp', 'Baexem', 'Baflo', 'Bakel', 'Bakhuizen', 'Bakkeveen',
'Balgoij', 'Balinge', 'Balk', 'Balkbrug', 'Balloo', 'Balloërveld',
'Ballum', 'Baneheide', 'Banholt', 'Bant', 'Bantega', 'Barchem',
'Barendrecht', 'Barger-Compascuum', 'Barneveld', 'Barsingerhorn',
'Basse', 'Batenburg', 'Bathmen', 'Bavel', 'Bavel AC', 'Bears', 'Bedum',
'Beegden', 'Beek', 'Beek en Donk', 'Beekbergen', 'Beemte Broekland',
'Beers NB', 'Beerta', 'Beerze', 'Beerzerveld', 'Beesd', 'Beesel',
'Beets', 'Beetsterzwaag', 'Beilen', 'Beinsdorp', 'Belfeld',
'Bellingwolde', 'Belt-Schutsloot', 'Beltrum', 'Bemelen', 'Bemmel',
'Beneden-Leeuwen', 'Bennebroek', 'Bennekom', 'Benneveld',
'Benningbroek', 'Benschop', 'Bentelo', 'Benthuizen', 'Bentveld',
'<NAME>', 'Berg en Terblijt', 'Bergambacht', 'Bergeijk',
'Bergen (NH)', 'Bergen L', '<NAME>', 'Bergen op Zoom',
'Bergentheim', 'Bergharen', 'Berghem', 'Bergschenhoek', 'Beringe',
'<NAME>', 'Berkel-Enschot', 'Berkenwoude', 'Berkhout',
'Berlicum', 'Berltsum', 'Bern', 'Best', 'Beugen', 'Beuningen',
'Beuningen Gld', 'Beusichem', 'Beutenaken', 'Beverwijk',
'Biddinghuizen', 'Bierum', 'Biervliet', 'Biest-Houtakker',
'Biezenmortel', 'Biggekerke', 'Bilthoven', 'Bingelrade', 'Bitgum',
'Bitgummole', 'Bladel', 'Blankenham', 'Blaricum', 'Blauwestad',
'Blauwhuis', 'Bleiswijk', 'Blesdijke', 'Bleskensgraaf ca', 'Blessum',
'Blije', 'Blijham', 'Blitterswijck', 'Bloemendaal', 'Blokker',
'Blokzijl', 'Boazum', 'Bocholtz', 'Bodegraven', 'Boekel',
'Boelenslaan', 'Boer', 'Boerakker', 'Boesingheliede', 'Boijl',
'Boksum', 'Bolsward', 'Bontebok', 'Boornbergum', 'Boornzwaag',
'Borculo', 'Borger', 'Borgercompagnie', 'Borgsweer', 'Born', 'Borne',
'Bornerbroek', 'Bornwird', 'Borssele', 'Bosch en Duin', 'Boschoord',
'Boskoop', 'Bosschenhoofd', 'Botlek Rotterdam', 'Bourtange',
'Boven-Leeuwen', 'Bovenkarspel', 'Bovensmilde', 'Boxmeer', 'Boxtel',
'Braamt', 'Brakel', 'Brandwijk', 'Brantgum', 'Breda', 'Bredevoort',
'Breedenbroek', 'Breezand', 'Breezanddijk', 'Breskens', 'Breukelen',
'Breukeleveen', 'Brielle', 'Briltil', 'Britsum', 'Britswert', 'Broek',
'Broek in Waterland', 'Broek op Langedijk', 'Broekhuizen',
'Broekhuizenvorst', 'Broekland', 'Broeksterwâld', 'Bronkhorst',
'Bronneger', 'Bronnegerveen', 'Brouwershaven', 'Bruchem', 'Brucht',
'Bruchterveld', 'Bruinehaar', 'Bruinisse', 'Brummen', 'Brunssum',
'Bruntinge', 'Buchten', 'Budel', 'Budel-Dorplein', 'Budel-Schoot',
'Buggenum', 'Buinen', 'Buinerveen', 'Buitenkaag', 'Buitenpost',
'Bunde', 'Bunne', 'Bunnik', 'Bunschoten-Spakenburg', 'Burdaard',
'Buren', 'Burgerbrug', 'Burgerveen', 'Burgh-Haamstede', 'Burgum',
'Burgwerd', 'Burum', 'Bussum', 'Buurmalsen', 'Cadier en Keer',
'Cadzand', 'Callantsoog', 'Capelle aan den IJssel', 'Castelre',
'Castenray', 'Casteren', 'Castricum', 'Chaam', 'Clinge', 'Coevorden',
'Colijnsplaat', 'Collendoorn', 'Colmschate', 'Cornwerd', 'Cothen',
'Creil', 'Cromvoirt', 'Cruquius', 'Cuijk', 'Culemborg', 'Daarle',
'Daarlerveen', 'Dalem', 'Dalen', 'Dalerpeel', 'Dalerveen', 'Dalfsen',
'Dalmsholte', 'Damwâld', 'Darp', '<NAME>', '<NAME>', '<NAME>',
'De Cocksdorp', '<NAME>', 'De Glind', 'De Goorn', 'De Groeve',
'De Heen', 'De Heurne', 'De Hoeve', '<NAME>', '<NAME>', 'De Knipe',
'De Koog', 'De Krim', '<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', 'De Punt', '<NAME>', '<NAME>',
'<NAME>', 'De Steeg', 'De Tike', 'De Veenhoop', 'De Waal',
'De Weere', 'De Westereen', '<NAME>', '<NAME>', '<NAME>',
'Dearsum', 'Dedemsvaart', 'Dedgum', 'Deelen', 'Deest', 'Deil',
'Deinum', 'Delden', 'Delfgauw', 'Delfstrahuizen', 'Delft', 'Delfzijl',
'Delwijnen', 'Demen', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', 'Den Ilp', 'Den Oever', 'Den Velde',
'Denekamp', 'Deurne', 'Deurningen', 'Deursen-Dennenburg', 'Deurze',
'Deventer', 'Didam', 'Dieden', 'Diemen', 'Diepenheim', 'Diepenveen',
'Dieren', 'Diessen', 'Diever', 'Dieverbrug', 'Diffelen', 'Dijken',
'Dinteloord', 'Dinxperlo', 'Diphoorn', 'Dirkshorn', 'Dirksland',
'Dodewaard', 'Doenrade', 'Doesburg', 'Doetinchem', 'Doeveren',
'Doezum', 'Dokkum', 'Doldersum', 'Domburg', 'Donderen', 'Dongen',
'Dongjum', 'Doniaga', 'Donkerbroek', 'Doorn', 'Doornenburg',
'Doornspijk', 'Doorwerth', 'Dordrecht', 'Dorst', 'Drachten',
'Drachten-Azeven', 'Drachtstercompagnie', 'Dreischor', 'Drempt',
'Dreumel', 'Driebergen-Rijsenburg', 'Drieborg', 'Driebruggen',
'Driehuis NH', 'Driehuizen', 'Driel', 'Driewegen', 'Driezum',
'Drijber', 'Drimmelen', 'Drogeham', 'Drogteropslagen', 'Drongelen',
'Dronryp', 'Dronten', 'Drouwen', 'Drouwenermond', 'Drouwenerveen',
'Drunen', 'Druten', 'Duiven', 'Duivendrecht', 'Duizel', 'Dussen',
'Dwingeloo', 'Eagum', 'Earnewâld', 'Easterein', 'Easterlittens',
'Eastermar', 'Easterwierrum', 'Echt', 'Echteld', 'Echten',
'Echtenerbrug', 'Eck en Wiel', 'Eckelrade', 'Edam', 'Ede', 'Ederveen',
'Ee', 'Eede', 'Eefde', 'Eelde', 'Eelderwolde', 'Eemdijk', 'Eemnes',
'Eemshaven', 'Een', 'Een-West', 'Eenrum', 'Eenum', 'Eerbeek', 'Eersel',
'Ees', 'Eesergroen', 'Eeserveen', 'Eesterga', 'Eesveen', 'Eethen',
'Eext', 'Eexterveen', 'Eexterveenschekanaal', 'Eexterzandvoort',
'Egchel', '<NAME>', 'Eg<NAME> den Hoef', 'Egmond-Binnen',
'Eibergen', 'Eijsden', 'Eindhoven', 'Einighausen', 'Ekehaar',
'Elahuizen', 'Elburg', 'Eldersloo', 'Eleveld', 'Elim', 'Elkenrade',
'Ell', 'Ellecom', 'Ellemeet', 'Ellertshaar', 'Ellewoutsdijk', 'Elp',
'Elsendorp', 'Elshout', 'Elsloo', 'Elspeet', 'Elst', 'El<NAME>',
'Emmeloord', 'Emmen', 'Emmer-Compascuum', 'Empe', 'Emst', 'Engwierum',
'Enkhuizen', 'Ens', 'Enschede', 'Enspijk', 'Enter', 'Enumatil', 'Epe',
'Epen', 'Eppenhuizen', 'Epse', 'Erica', 'Erichem', 'Erlecom', 'Erm',
'Ermelo', 'Erp', 'Esbeek', 'Esch', 'Escharen', 'Espel', 'Est', 'Etten',
'Etten-Leur', 'Europoort Rotterdam', 'Eursinge', 'Everdingen',
'Evertsoord', 'Ewijk', 'Exloo', 'Exloërveen', 'Exmorra', 'Eygelshoven',
'Eys', 'Ezinge', 'Farmsum', 'Feanwâlden', 'Feerwerd', 'Feinsum',
'Ferwert', 'Ferwoude', 'Fijnaart', 'Finsterwolde', 'Firdgum',
'Fleringen', 'Fluitenberg', 'Fochteloo', 'Follega', 'Folsgare',
'Formerum', 'Foudgum', 'Foxhol', 'Foxwolde', 'Franeker',
'Frederiksoord', 'Friens', 'Frieschepalen', 'Froombosch', 'Gaanderen',
'Gaast', 'Gaastmeer', 'Galder', 'Gameren', 'Gapinge', 'Garderen',
'Garmerwolde', 'Garminge', 'Garnwerd', 'Garrelsweer', 'Garsthuizen',
'Garyp', 'Gassel', 'Gasselte', 'Gasselternijveen',
'Gasselternijveenschemond', 'Gastel', 'Gasteren', 'Gauw', 'Geelbroek',
'Geerdijk', 'Geersdijk', 'Geertruidenberg', 'Geervliet', 'Gees',
'Geesbrug', 'Geesteren', 'Geeuwenbrug', 'Geffen', 'Geijsteren',
'Geldermalsen', 'Gelderswoude', 'Geldrop', 'Geleen', 'Gellicum',
'Gelselaar', 'Gemert', 'Gemonde', 'Genderen', 'Gendringen', 'Gendt',
'Genemuiden', 'Gennep', 'Gerkesklooster', 'Gersloot', 'Geulle',
'Giesbeek', 'Giessen', 'Giessenburg', 'Gieten', 'Gieterveen',
'Giethmen', 'Giethoorn', 'Gilze', 'Ginnum', 'Glane', 'Glimmen',
'Godlinze', 'Goedereede', 'Goes', 'Goingarijp', 'Goirle', 'Goor',
'Gorinchem', 'Gorredijk', 'Gorssel', 'Gouda', 'Gouderak', 'Goudriaan',
'Goudswaard', 'Goutum', 'Goënga', 'Goëngahuizen', 'Graauw',
'Grafhorst', 'Graft', 'Gramsbergen', 'Grashoek', 'Grathem', 'Grave',
'Greonterp', 'Grevenbicht', 'Griendtsveen', 'Grijpskerk',
'Grijpskerke', 'Groede', 'Groenekan', 'Groeningen', 'Groenlo',
'Groesbeek', 'Groessen', 'Groet', 'Grolloo', 'Groningen', 'Gronsveld',
'Groot-Ammers', 'Grootebroek', 'Grootegast', 'Grootschermer', 'Grou',
'Grubbenvorst', 'Gulpen', 'Guttecoven', 'Gytsjerk', 'Haaften',
'Haaksbergen', 'Haalderen', 'Haaren', 'Haarle', 'Haarlem',
'Haarlemmerliede', 'Haarlo', 'Haarsteeg', 'Haarzuilens', 'Haastrecht',
'Haelen', 'Hagestein', 'Haghorst', 'Haler', 'Halfweg', 'Hall', 'Halle',
'Hallum', 'Halsteren', 'Handel', 'Hank', 'Hansweert', 'Hantum',
'Hantumeruitburen', 'Hantumhuizen', 'Hapert', 'Haps', 'Harbrinkhoek',
'Hardenberg', 'Harderwijk', 'Hardinxveld-Giessendam', 'Haren',
'<NAME>', 'Harfsen', 'Harich', 'Haringhuizen', 'Harkema',
'Harkstede', 'Harlingen', 'Harmelen', 'Harreveld', 'Harskamp',
'Hartwerd', 'Haskerdijken', 'Haskerhorne', 'Hasselt', 'Hattem',
'Hattemerbroek', 'Haule', 'Haulerwijk', 'Hauwert', 'Havelte',
'Havelterberg', 'Hazerswoude-Dorp', 'Hazerswoude-Rijndijk', 'Hedel',
'Hedikhuizen', 'Hee', 'Heeg', 'Heel', 'Heelsum', 'Heelweg',
'Heemserveen', 'Heemskerk', 'Heemstede', 'Heenvliet', 'Heerde',
'Heerenveen', 'Heerewaarden', 'Heerhugowaard', 'Heerjansdam', 'Heerle',
'Heerlen', 'Heesbeen', 'Heesch', 'Heesselt', 'Heeswijk-Dinther',
'Heeten', 'Heeze', 'Hegebeintum', 'Hegelsom', 'Hei- en Boeicop',
'Heibloem', 'Heide', 'Heijen', 'Heijenrath', 'Heijningen', 'Heikant',
'Heilig Landstichting', 'Heiligerlee', 'Heiloo', 'Heinenoord',
'Heinkenszand', 'Heino', 'Hekelingen', 'Hekendorp', 'Helden',
'Helenaveen', 'Hellendoorn', 'Hellevoetsluis', 'Hellouw', 'Hellum',
'Helmond', 'Helvoirt', 'Hem', 'Hemelum', 'Hemmen', 'Hempens', 'Hemrik',
'Hendrik-Ido-Ambacht', 'Hengelo', 'Hengelo (Gld)', 'Hengevelde',
'Hengstdijk', 'Hensbroek', 'Herbaijum', 'Herkenbosch', 'Herkingen',
'Hernen', 'Herpen', 'Herpt', 'Herten', 'Hertme', 'Herveld', 'Herwen',
'Herwijnen', 'Heteren', 'Heukelom', 'Heukelum', 'Heumen', 'Heusden',
'Heveadorp', 'Heythuysen', 'Hezingen', 'Hiaure', 'Hichtum', 'Hidaard',
'Hierden', 'Hieslum', 'Hijken', 'Hijum', 'Hilaard', 'Hillegom',
'Hilvarenbeek', 'Hilversum', 'Hindeloopen', 'Hinnaard',
'Hippolytushoef', 'Hitzum', 'Hobrede', 'Hoedekenskerke', 'Hoek',
'Hoek van Holland', 'Hoenderloo', 'Hoensbroek', 'Hoenzadriel',
'Hoevelaken', 'Hoeven', 'Hoge Hexel', 'Hollandsche Rading',
'Hollandscheveld', 'Hollum', 'Holsloot', 'Holten', 'Holthees',
'Holtheme', 'Holthone', 'Holtum', 'Holwerd', 'Holwierde', 'Hommerts',
'Homoet', 'Honselersdijk', 'Hoofddorp', 'Hoofdplaat', 'Hoog Soeren',
'Hoog-Keppel', 'Hoogblokland', 'Hooge Mierde', 'Hooge Zwaluwe',
'Hoogeloon', 'Hoogenweg', 'Hoogerheide', 'Hoogersmilde', 'Hoogeveen',
'Hoogezand', 'Hooghalen', 'Hoogkarspel', 'Hoogland', 'Hooglanderveen',
'Hoogmade', 'Hoogvliet Rotterdam', 'Hoogwoud', 'Hoorn', 'Hoornaar',
'Hoornsterzwaag', 'Horn', 'Hornhuizen', 'Horssen', 'Horst', 'Houten',
'Houtigehage', 'Houwerzijl', 'Huijbergen', 'Huis ter Heide',
'Huisduinen', 'Huisseling', 'Huissen', 'Huizen', 'Huizinge',
'Hulsberg', 'Hulsel', 'Hulshorst', 'Hulst', 'Hulten', 'Hummelo',
'Hunsel', 'Hurdegaryp', 'Hurwenen', 'Húns', 'IJhorst', 'IJlst',
'IJmuiden', 'IJsselham', 'IJsselmuiden', 'IJsselstein', 'IJzendijke',
'IJzendoorn', 'Idaerd', 'Idsegahuizum', 'Idskenhuizen', 'Idzega',
'Iens', 'Ilpendam', 'Indijk', 'Ingber', 'Ingelum', 'Ingen',
'It Heidenskip', 'Itens', 'Ittervoort', 'Jaarsveld', 'Jabeek',
'Jannum', 'Jellum', 'Jelsum', 'Jirnsum', 'Jislum', 'Jisp', 'Jistrum',
'Jonkerslân', 'Jonkersvaart', 'Joppe', 'Jorwert', 'Joure', 'Jouswier',
'Jubbega', 'Julianadorp', 'Jutrijp', 'Kaag', 'Kaard', 'Kaatsheuvel',
'Kalenberg', 'Kallenkote', 'Kamerik', 'Kampen', 'Kamperland',
'Kamperveen', 'Kantens', '<NAME>', 'Kapel-Avezaath', 'Kapelle',
'Kapellebrug', 'Katlijk', 'Kats', 'Kattendijke', 'Katwijk',
'<NAME>', 'Katwoude', 'Kedichem', 'Keent', 'Keijenborg',
'Kekerdom', 'Kelpen-Oler', '<NAME>', 'Kerk-Avezaath',
'Kerkdriel', 'Kerkenveld', 'Kerkrade', 'Kerkwerve', 'Kerkwijk',
'Kessel', 'Kesteren', 'Kiel-Windeweer', 'Kilder', 'Kimswerd',
'Kinderdijk', 'Kinnum', 'Klaaswaal', 'Klarenbeek', 'Klazienaveen',
'Klazienaveen-Noord', '<NAME>', 'Klijndijk', 'Klimmen',
'Kloetinge', '<NAME>', 'Kloosterburen', 'Kloosterhaar',
'Kloosterzande', 'Klundert', 'Knegsel', 'Koarnjum', 'Kockengen',
'Koedijk', 'Koekange', 'Koewacht', 'Kolderwolde', 'Kolham', 'Kolhorn',
'Kollum', 'Kollumerpomp', 'Kollumerzwaag', 'Kommerzijl',
'Koningsbosch', 'Koningslust', 'Koog aan de Zaan', 'Koolwijk',
'Kootstertille', 'Kootwijk', 'Kootwijkerbroek', 'Kornhorn',
'Kornwerderzand', 'Kortehemmen', 'Kortenhoef', 'Kortgene',
'Koudekerk aan den Rijn', 'Koudekerke', 'Koudum', 'Koufurderrige',
'Krabbendijke', 'Kraggenburg', 'Kreileroord', 'Krewerd',
'Krimpen aan de Lek', 'Krimpen aan den IJssel', 'Kring van Dorth',
'Krommenie', 'Kronenberg', 'Kropswolde', 'Kruiningen', 'Kruisland',
'Kudelstaart', 'Kuinre', 'Kuitaart', 'Kwadendamme', 'Kwadijk',
'Kwintsheul', 'Kûbaard', 'Laag Zuthem', 'Laag-Keppel', 'Laag-Soeren',
'Lage Mierde', 'Lage Vuursche', 'Lage Zwaluwe', 'Lageland',
'Lambertschaag', 'Lamswaarde', 'Landerum', 'Landgraaf', 'Landhorst',
'Landsmeer', 'Langbroek', 'Langedijke', 'Langelille', 'Langelo',
'Langenboom', 'Langerak', 'Langeveen', 'Langeweg', 'Langezwaag',
'Langweer', 'Laren', 'Lathum', 'Lattrop-Breklenkamp', 'Lauwersoog',
'Lauwerzijl', 'Ledeacker', 'Leek', 'Leende', 'Leens', 'Leerbroek',
'Leerdam', 'Leermens', 'Leersum', 'Leeuwarden', 'Legemeer', 'Leiden',
'Leiderdorp', 'Leidschendam', 'Leimuiden', 'Leimuiderbrug',
'Lekkerkerk', 'Lekkum', 'Lellens', 'Lelystad', 'Lemele', 'Lemelerveld',
'Lemiers', 'Lemmer', 'Lengel', 'Lent', 'Leons', 'Lepelstraat',
'Lettelbert', 'Lettele', 'Leunen', 'Leur', 'Leusden', 'Leuth',
'Leutingewolde', 'Leuvenheim', 'Leveroy', 'Lewedorp', 'Lexmond',
'Lichtaard', 'Lichtenvoorde', 'Liempde', 'Lienden', 'Lierderholthuis',
'Lieren', 'Lierop', 'Lies', 'Lieshout', 'Liessel', 'Lievelde',
'Lieveren', 'Lijnden', 'Limbricht', 'Limmen', 'Linde', 'Linden',
'Linne', 'Linschoten', 'Lioessens', 'Lippenhuizen', 'Lisse',
'Lisserbroek', 'Lith', 'Lithoijen', 'Lobith', 'Lochem', 'Loenen',
'<NAME>', 'Loenersloot', 'Loerbeek', 'Lollum', 'Lomm',
'Longerhouw', '<NAME>', 'Loon', '<NAME>', 'Loosbroek',
'Loosdrecht', 'Loozen', 'Lopik', 'Lopikerkapel', 'Loppersum',
'Losdorp', 'Losser', 'Lottum', 'Loënga', 'Lucaswolde', 'Luddeweer',
'Luinjeberd', 'Lunteren', 'Lutjebroek', 'Lutjegast', 'Lutjewinkel',
'Luttelgeest', 'Lutten', 'Luttenberg', 'Luxwoude', 'Luyksgestel',
'Lytsewierrum', 'Maarheeze', 'Maarn', 'Maarsbergen', 'Maarssen',
'Maartensdijk', 'Maasbommel', 'Maasbracht', 'Maasbree', 'Maasdam',
'Maasdijk', 'Maashees', 'Maasland', 'Maassluis', 'Maastricht',
'Maastricht-Airport', 'Maasvlakte Rotterdam', 'Macharen', 'Made',
'Makkinga', 'Makkum', 'Malden', 'Mander', 'Manderveen', 'Mantgum',
'Mantinge', 'Maren-Kessel', 'Margraten', '<NAME>', 'Mariahout',
'Mariaparochie', 'Marijenkampen', 'Mariënberg', 'Mariënheem',
'Mariënvelde', 'Markelo', 'Marken', 'Markenbinnen', 'Marknesse',
'Marle', 'Marrum', 'Marsum', 'Marum', 'Marwijksoord', 'Mastenbroek',
'Matsloot', 'Maurik', 'Mechelen', 'Medemblik', 'Meeden', 'Meedhuizen',
'Meerkerk', 'Meerlo', 'Meerssen', 'Meerstad', 'Meeuwen', 'Megchelen',
'Megen', 'Meijel', 'Melderslo', 'Melick', 'Meliskerke', 'Melissant',
'Menaam', 'Mensingeweer', 'Meppel', 'Meppen', 'Merkelbeek', 'Merselo',
'Meteren', 'Meterik', 'Metslawier', 'Mheer', 'Middelaar', 'Middelburg',
'Middelharnis', 'Middelie', 'Middelstum', 'Middenbeemster',
'Middenmeer', 'Midlaren', 'Midlum', 'Midsland', 'Midwolda', 'Midwolde',
'Midwoud', 'Miedum', 'Mierlo', 'Mijdrecht', 'Mijnsheerenland',
'Mildam', 'Milheeze', 'Mill', '<NAME>', 'Milsbeek',
'Minnertsga', 'Mirns', 'Moddergat', 'Moerdijk', 'Moergestel',
'Moerkapelle', 'Moerstraten', 'Molenaarsgraaf', 'Molenhoek',
'Molenschot', 'Molkwerum', 'Monnickendam', 'Monster', 'Montfoort',
'Montfort', 'Mook', 'Mookhoek', 'Moordrecht', 'Moorveld', 'Morra',
'Muiden', 'Muiderberg', 'Munnekeburen', 'Munnekezijl', 'Munstergeleen',
'Muntendam', 'Mussel', 'Musselkanaal', 'Mûnein', 'Naaldwijk',
'Naarden', 'Nagele', 'Nederasselt', 'Nederhemert',
'<NAME>', 'Nederland', 'Nederweert', 'Nederweert-Eind',
'Neede', 'Neer', 'Neerijnen', 'Neeritter', 'Neerkant', 'Neerlangel',
'Neerloon', 'Nes', 'Netersel', 'Netterden', 'Niawier', 'Nibbixwoud',
'Niebert', 'Niehove', 'Niekerk', 'Nietap', '<NAME>',
'<NAME>', 'Nieuw Heeten', 'Nieuw Namen', 'Nieuw Scheemda',
'Nieuw- en Sint Joosland', 'Nieuw-Amsterdam', 'Nieuw-Balinge',
'Nieuw-Beijerland', 'Nieuw-Buinen', 'Nieuw-Dordrecht',
'Nieuw-Lekkerland', 'Nieuw-Roden', 'Nieuw-Schoonebeek', 'Nieuw-Vennep',
'Nieuw-Vossemeer', 'Nieuw-Weerdinge', 'Nieuwaal', 'Nieuwdorp',
'<NAME>', '<NAME>', '<NAME>', 'Nieuwe-Tonge',
'Nieuwebrug', 'Nieuwediep', 'Nieuwegein', 'Nieuwehorne', 'Nieuwendijk',
'<NAME>', '<NAME>', 'Nieuwerkerk',
'<NAME>', 'Nieuweroord', 'Nieuwersluis',
'Nieuweschoot', 'Nieuwkoop', 'Nieuwkuijk', 'Nieuwland', 'Nieuwlande',
'Nieuwlande Coevorden', 'Nieuwleusen', 'Nieuwolda', 'Nieuwpoort',
'Nieuwstadt', 'Nieuwveen', 'Nieuwvliet', 'Niezijl', 'Niftrik',
'Nigtevecht', '<NAME>', '<NAME>', 'Nijbroek', 'Nijeberkoop',
'Nijega', 'Nijehaske', 'Nijeholtpade', 'Nijeholtwolde', 'Nijelamer',
'Nijemirdum', 'Nijensleek', 'Nijetrijne', 'Nijeveen', 'Nijhuizum',
'Nijkerk', 'Nijkerkerveen', 'Nijland', 'Nijlande', 'Nijmegen',
'Nijverdal', 'Nispen', 'Nisse', 'Nistelrode', 'Noardburgum',
'Nooitgedacht', 'Noorbeek', 'Noord-Scharwoude', 'Noord-Sleen',
'Noordbeemster', 'Noordbroek', 'Noordeinde', 'Noordeinde Gld',
'Noordeloos', 'Noorden', 'Noordgouwe', 'Noordhoek', 'Noordhorn',
'Noordlaren', 'Noordscheschut', 'Noordwelle', 'Noordwijk',
'Noordwijkerhout', 'Noordwolde', 'Nootdorp', 'Norg', 'Notter',
'Nuenen', 'Nuis', 'Nuland', 'Numansdorp', 'Nunhem', 'Nunspeet', 'Nuth',
'Nutter', 'Obbicht', 'Obdam', 'Ochten', 'Odijk', 'Odiliapeel',
'Odoorn', 'Odoornerveen', 'Oeffelt', 'Oegstgeest', 'Oene', 'Oentsjerk',
'Offingawier', 'Oh<NAME>', 'Oijen', 'Oirlo', 'Oirsbeek', 'Oirschot',
'Oisterwijk', 'Okkenbroek', 'Olburgen', 'Oldeberkoop', 'Oldebroek',
'Oldeholtpade', 'Oldeholtwolde', 'Oldehove', 'Oldekerk', 'Oldelamer',
'Oldemarkt', 'Oldenzaal', 'Oldenzijl', 'Oldeouwer', 'Oldetrijne',
'Olst', 'Olterterp', 'Ommel', 'Ommen', 'Ommeren', 'Onderdendam',
'Onna', 'Onnen', 'Onstwedde', 'Ooij', 'Ooltgensplaat',
'Oost West en Middelbeers', 'Oost-Graftdijk', 'Oost-Souburg',
'Oostburg', 'Oostdijk', 'Oosteind', 'Oosterbeek', 'Oosterbierum',
'Oosterblokker', 'Oosterend', 'Oosterhesselen', 'Oosterhout',
'Oosterland', 'Oosterleek', 'Oosternieland', 'Oosternijkerk',
'Oosterstreek', 'Oosterwijk', 'Oosterwijtwerd', 'Oosterwolde',
'O<NAME>', 'Oosterzee', 'Oosthem', 'Oosthuizen', 'Oostkapelle',
'Oostknollendam', 'Oostrum', 'Oostvoorne', 'Oostwold', 'Oostwoud',
'Oostzaan', 'Ootmarsum', 'Opeinde', 'Opende', 'Ophemert', 'Opheusden',
'Opijnen', 'Oploo', 'Opmeer', 'Oppenhuizen', 'Opperdoes', 'Oranje',
'Oranjewoud', 'Orvelte', 'Ospel', 'Oss', 'Ossendrecht', 'Ossenisse',
'Ossenwaard', 'Ossenzijl', 'Oterleek', 'Otterlo', 'Ottersum',
'Ottoland', '<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', 'Oud-Alblas', 'Oud-Beijerland', 'Oud-Vossemeer',
'Ouddorp', '<NAME>', '<NAME>', '<NAME>', 'O<NAME>',
'<NAME>', 'Oude-Tonge', 'Oudebildtzijl', 'Oudega', 'Oudehaske',
'Oudehorne', 'Oudelande', 'Oudemirdum', 'Oudemolen', 'Oudenbosch',
'Oudendijk', 'Oudenhoorn', 'Ouderkerk aan de Amstel',
'Ouderkerk aan den IJssel', 'Oudeschans', 'Oudeschild', 'Oudeschip',
'Oudeschoot', 'Oudesluis', 'Oudewater', 'Oudezijl', 'Oudheusden',
'Oudkarspel', 'Oudorp', 'Oudwoude', 'Ouwerkerk', 'Ouwster-Nijega',
'Ouwsterhaule', 'Overasselt', 'Overberg', 'Overdinkel', 'Overlangel',
'Overloon', 'Overschild', 'Overslag', 'Overveen', 'Ovezande',
'Paasloo', 'Paesens', 'Pannerden', 'Panningen', 'Papekop',
'Papendrecht', 'Papenhoven', 'Papenvoort', 'Parrega', 'Paterswolde',
'Peest', 'Peins', 'Peize', 'Peperga', '<NAME>', 'Persingen',
'Pesse', 'Petten', 'Philippine', 'Piaam', 'Piershil', 'Pieterburen',
'Pietersbierum', 'Pieterzijl', 'Pijnacker', 'Pingjum', 'Plasmolen',
'Poederoijen', 'Poeldijk', 'Polsbroek', 'Poortugaal', 'Poortvliet',
'Poppenwier', 'Posterholt', 'Prinsenbeek', 'Puiflijk', 'Punthorst',
'Purmer', 'Purmerend', 'Purmerland', 'Puth', 'Putte', 'Putten',
'Puttershoek', 'Raalte', 'Raamsdonk', 'Raamsdonksveer', 'Raard',
'Radewijk', '<NAME>', 'Raerd', 'Randwijk', 'Ransdaal',
'Rasquert', 'Ravenstein', 'Ravenswaaij', 'Ravenswoud', 'Readtsjerk',
'Reahûs', 'Reduzum', 'Reek', 'Reeuwijk', 'Reijmerstok', 'Reitsum',
'Rekken', 'Renesse', 'Renkum', 'Renswoude', 'Ressen', 'Retranchement',
'Reusel', 'Reutum', 'Reuver', 'Rha', 'Rheden', 'Rhee', 'Rheeze',
'Rheezerveen', 'Rhenen', 'Rhenoy', 'Rhoon', 'Ridderkerk', 'Ried',
'Riel', 'Rien', 'Riethoven', 'Rietmolen', 'Rijen', 'Rijkevoort',
'Rijkevoort-De Walsert', 'Rijnsaterwoude', 'Rijnsburg', 'Rijpwetering',
'Rijs', 'Rijsbergen', 'Rijsenhout', 'Rijssen', 'Rijswijk',
'Rijswijk (GLD)', 'Rijswijk (NB)', 'Rilland', 'Rinsumageast',
'Ritthem', 'Rockanje', 'Roden', 'Roderesch', 'Roderwolde',
'Roelofarendsveen', 'Roermond', 'Rogat', 'Roggel', 'Rohel', 'Rolde',
'Roodeschool', 'Roosendaal', 'Roosteren', 'Rosmalen', 'Rossum',
'Roswinkel', 'Rotstergaast', 'Rotsterhaule', 'Rotterdam',
'Rotterdam-Albrandswaard', 'Rottevalle', 'Rottum', 'Rouveen',
'Rozenburg', 'Rozendaal', 'Rucphen', 'Ruigahuizen', 'Ruinen',
'Ruinerwold', 'Rumpt', 'Rutten', 'Ruurlo', 'Ryptsjerk', 'Saaksum',
'Saasveld', 'Saaxumhuizen', 'Sambeek', 'Sandfirden', 'Santpoort-Noord',
'Santpoort-Zuid', 'Sappemeer', '<NAME>', 'Sassenheim', 'Sauwerd',
'Schagen', 'Schagerbrug', 'Schaijk', 'Schalkhaar', 'Schalkwijk',
'Schalsum', 'Schardam', 'Scharendijke', 'Scharmer', 'Scharnegoutum',
'Scharsterbrug', 'Scharwoude', 'Scheemda', 'Scheerwolde',
'Schellinkhout', 'Schelluinen', 'Schermerhorn', 'Scherpenisse',
'Scherpenzeel', 'Schettens', 'Scheulder', 'Schiedam',
'Schiermonnikoog', 'Schijf', 'Schijndel', 'Schildwolde', 'Schimmert',
'Schin op Geul', 'Schinnen', 'Schinveld', 'Schipborg', 'Schiphol',
'Schiphol-Rijk', 'Schipluiden', 'Schokland', 'Schoondijke',
'Schoonebeek', 'Schoonhoven', 'Schoonloo', 'Schoonoord',
'Schoonrewoerd', 'Schoorl', 'Schore', 'Schouwerzijl', 'Schraard',
'Schuinesloot', 'Sebaldeburen', 'Sellingen', 'Serooskerke', 'Sevenum',
'Sexbierum', 'Sibculo', 'Sibrandabuorren', 'Sibrandahûs', 'Siddeburen',
'Siebengewald', 'Siegerswoude', 'Sijbekarspel', 'Silvolde',
'Simonshaven', 'Simpelveld', 'Sinderen', 'Sint Agatha', 'Sint Annen',
'Sint Anthonis', 'Sint Geertruid', 'Sint Hubert', 'Sint Jansklooster',
'Sint Jansteen', 'Sint Joost', 'Sint Kruis', 'Sint Maarten',
'Sint Maartensbrug', 'Sint Maartensvlotbrug', 'Sint Nicolaasga',
'Sint Odiliënberg', 'Sint Pancras', 'Sint Philipsland',
'Sint-Annaland', 'Sint-Maartensdijk', 'Sint-Michielsgestel',
'Sint-Oedenrode', 'Sintjohannesga', 'Sirjansland', 'Sittard',
'Skingen', 'Slagharen', 'Slappeterp', 'Sleen', 'Sleeuwijk', 'Slenaken',
'Sliedrecht', 'Slijk-Ewijk', 'Slijkenburg', 'Slochteren', 'Slootdorp',
'Sloten', 'Sluis', 'Sluiskil', 'Smakt', '<NAME>', 'Smallebrugge',
'Smilde', 'Snakkerburen', 'Sneek', 'Snelrewaard', 'Snikzwaag',
'Soerendonk', 'Soest', 'Soesterberg', 'Someren', 'Sommelsdijk',
'<NAME>', 'Sondel', 'Sonnega', 'Spaarndam',
'Spaarndam gem. Haarlem', 'Spanbroek', 'Spanga', 'Spankeren',
'Spannum', 'Spaubeek', 'Spier', 'Spierdijk', 'Spijk', '<NAME>',
'Spijkenisse', 'Spijkerboor', 'Sprang-Capelle', 'Sprundel', 'Spui',
'<NAME>', 'St.-Annaparochie', 'St.-Jacobiparochie',
"Stad aan 't Haringvliet", 'Stadskanaal', 'Stampersgat',
'Standdaarbuiten', 'Staphorst', 'Starnmeer', 'Startenhuizen',
'Stavenisse', 'Stavoren', 'Stedum', 'Steenbergen', 'Steendam',
'Steenderen', 'Steenenkamer', 'Steensel', 'Steenwijk',
'Steenwijkerwold', 'Stegeren', 'Steggerda', 'Stein', 'Stellendam',
'Sterksel', 'Stevensbeek', 'Stevensweert', 'Steyl', 'Stieltjeskanaal',
'Stiens', 'Stitswerd', 'Stokkum', 'Stolwijk', 'Stompetoren',
'Stoutenburg', 'Stoutenburg Noord', 'Stramproy', 'Streefkerk',
'Striep', 'Strijbeek', 'Strijen', 'Strijensas', 'Stroe', 'Stroobos',
'Stuifzand', 'Sumar', 'Surhuisterveen', 'Surhuizum', 'Susteren',
'Suwâld', 'Swalmen', 'Sweikhuizen', 'Swichum', 'Swifterbant',
'Swolgen', 'Taarlo', 'Teeffelen', 'Teerns', 'Tegelen', '<NAME>',
'Ten Post', '<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', 'Terband', 'Terborg', 'Terheijden',
'Terherne', 'Terhole', 'Terkaple', 'Termunten', 'Termunterzijl',
'Ternaard', 'Terneuzen', 'Teroele', 'Terschuur', 'Tersoal',
'Terwispel', 'Terwolde', 'Teteringen', 'Teuge', 'Thesinge', 'Tholen',
'Thorn', 'Tiel', 'Tiendeveen', 'Tienhoven', 'Tienray', 'Tijnje',
'Tilburg', 'Tilligte', 'Tinallinge', 'Tinte', 'Tirns', 'Tjalhuizum',
'Tjalleberd', 'Tjerkgaast', 'Tjerkwerd', 'Tjuchem', 'Tolbert',
'Toldijk', 'Tolkamer', 'Tollebeek', 'Tonden', 'Toornwerd', 'Tricht',
'Triemen', 'Tripscompagnie', 'Tubbergen', 'Tuil', 'Tuitjenhorn', 'Tuk',
"Tull en 't Waal", 'Twello', 'Twijzel', 'Twijzelerheide', 'Twisk',
'Tynaarlo', 'Tytsjerk', 'Tzum', 'Tzummarum', 'Ubbena', 'Ubbergen',
'Uddel', 'Uden', 'Udenhout', 'Uffelte', 'Ugchelen', 'Uitdam',
'Uitgeest', 'Uithoorn', 'Uithuizen', 'Uithuizermeeden',
'Uitwellingerga', 'Uitwijk', 'Ulestraten', 'Ulft', 'Ulicoten', 'Ulrum',
'Ulvenhout', 'Ulvenhout AC', 'Ureterp', 'Urk', 'Urmond', 'Ursem',
'Ursem gem. S', 'Usquert', 'Utrecht', 'Vaals', 'Vaassen', 'Valburg',
'Valkenburg', 'Valkenswaard', 'Valthe', 'Valthermond', 'Varik',
'Varsselder', 'Varsseveld', 'Vasse', 'Veelerveen', 'Veen', 'Veendam',
'Veenendaal', 'Veenhuizen', 'Veeningen', 'Veenklooster', 'Veenoord',
'Veere', 'Veessen', 'Vegelinsoord', 'Veghel', 'Velddriel', 'Velden',
'Veldhoven', 'Velp', 'Velsen-Noord', 'Velsen-Zuid', 'Velserbroek',
'Ven-Zelderheide', 'Venebrugge', 'Venhorst', 'Venhuizen', 'Venlo',
'Venray', 'Vessem', 'Vethuizen', 'Veulen', 'Vianen', 'Vianen NB',
'Vierakker', 'Vierhouten', 'Vierhuizen', 'Vierlingsbeek',
'Vierpolders', 'Vijfhuizen', 'Vijlen', 'Vilsteren', 'Vinkega',
'Vinkel', 'Vinkenbuurt', 'Vinkeveen', 'Visvliet', 'Vlaardingen',
'Vlagtwedde', 'Vledder', 'Vledderveen', 'Vleuten', 'Vlieland',
'Vlierden', 'Vlijmen', 'Vlissingen', 'Vlist', 'Vlodrop', 'Voerendaal',
'Vogelenzang', 'Vogelwaarde', 'Volendam', 'Volkel', 'Vollenhove',
'Vondelingenplaat Rotterdam', 'Voorburg', 'Voorhout', 'Voorschoten',
'Voorst', 'Voorthuizen', 'Vorchten', 'Vorden', 'Vorstenbosch',
'Vortum-Mullem', 'Vragender', 'Vredenheim', 'Vredepeel', 'Vreeland',
'Vries', 'Vriescheloo', 'Vriezenveen', 'Vroomshoop', 'Vrouwenakker',
'Vrouwenparochie', 'Vrouwenpolder', 'Vught', 'Vuren', 'Waaksens',
'Waal', 'Waalre', 'Waalwijk', 'Waarde', 'Waardenburg', 'Waarder',
'Waardhuizen', 'Waarland', 'Waaxens', 'Wachtum', 'Waddinxveen',
'Wadenoijen', 'Wagenberg', 'Wagenborgen', 'Wageningen', 'Walem',
'Walsoorden', 'Wamel', 'Wanneperveen', 'Wanroij', 'Wanssum',
'Wapenveld', 'Wapse', 'Wapserveen', 'Warder', 'Warffum', 'Warfhuizen',
'Warfstermolen', 'Warmenhuizen', 'Warmond', 'Warns', 'Warnsveld',
'Warstiens', 'Warten', 'Waskemeer', 'Waspik', 'Wassenaar', 'Wateren',
'Watergang', 'Waterhuizen', 'Wateringen', 'Waterlandkerkje',
'Waverveen', 'Wedde', 'Weerselo', 'Weert', 'Weesp', 'Wehe-den Hoorn',
'Wehl', 'Weidum', 'Weiteveen', 'Wekerom', 'Well', 'Well L',
'Wellerlooi', 'Welsum', 'Wemeldinge', 'Wenum Wiesel', 'Wergea',
'Werkendam', 'Werkhoven', 'Wernhout', 'Wervershoof', 'Wesepe',
'Wessem', 'West-Graftdijk', 'West-Terschelling', 'Westbeemster',
'Westbroek', 'Westdorp', 'Westdorpe', 'Westendorp', 'Westerbeek',
'Westerbork', 'Westerbroek', 'Westeremden', 'Westergeest',
'Westerhaar-Vriezenveensewijk', 'Westerhoven', 'Westerland',
'Westerlee', 'Westernieland', 'Westervelde', 'Westervoort',
'Westerwijtwerd', 'Westhem', 'Westhoek', 'Westkapelle',
'Westknollendam', 'Westmaas', 'Westwoud', 'Westzaan', 'Wetering',
'Weteringbrug', 'Wetsens', 'Wetsinge', 'Weurt', 'Wezep', 'Wezup',
'Wezuperbrug', 'Wichmond', 'Wier', 'Wierden', 'Wieringerwaard',
'Wieringerwerf', 'Wierum', 'Wijchen', 'Wijckel', 'Wijdenes',
'Wijdewormer', 'Wijhe', '<NAME>ee', 'Wijk bij Duurstede',
'Wijk en Aalburg', 'Wijlre', 'Wijnaldum', 'Wijnandsrade', 'Wijnbergen',
'Wijngaarden', 'Wijnjewoude', 'Wijster', 'Wilbertoord', 'Wildervank',
'Wilhelminadorp', 'Wilhelminaoord', 'Willemsoord', 'Willemstad',
'Wilnis', 'Wilp', 'Wilsum', 'Winde', 'Windraak', 'Winkel', 'Winneweer',
'Winschoten', 'Winssen', 'Winsum', 'Wintelre', 'Winterswijk',
'Winterswijk Brinkheurne', 'Winterswijk Corle', 'Winterswijk Henxel',
'Winterswijk Huppel', 'Winterswijk Kotten', 'Winterswijk Meddo',
'Winterswijk Miste', 'Winterswijk Ratum', 'Winterswijk Woold',
'Wirdum', '<NAME>', 'Wissenkerke', 'Witharen', 'Witmarsum',
'<NAME>', 'Wittelte', 'Wittem', 'Witteveen', 'Wiuwert',
'Wjelsryp', 'Woensdrecht', 'Woerden', '<NAME>', 'Wognum',
'Woldendorp', 'Wolfheze', 'Wolphaartsdijk', 'Wolsum', 'Woltersum',
'Wolvega', 'Wommels', 'Wons', 'Workum', 'Wormer', 'Wormerveer',
'Woubrugge', 'Woudbloem', 'Woudenberg', 'Woudrichem', 'Woudsend',
'Wouw', '<NAME>', 'Wyns', 'Wytgaard', 'Wâlterswâld',
'Wânswert', 'Yde', 'Yerseke', 'Ypecolsga', 'Ysbrechtum', 'Ysselsteyn',
'Zaamslag', 'Zaandam', 'Zaandijk', 'Zalk', 'Zaltbommel', 'Zandberg',
'Zandeweer', 'Zandhuizen', 'Zandpol', 'Zandvoort', 'Zeddam', 'Zeegse',
'Zeeland', 'Zeerijp', 'Zeewolde', 'Zegge', 'Zegveld', 'Zeijen',
'Zeijerveen', 'Zeijerveld', 'Zeist', 'Zelhem', 'Zenderen',
'Zennewijnen', 'Zetten', 'Zevenaar', 'Zevenbergen',
'<NAME>', 'Zevenhoven', 'Zevenhuizen', 'Zierikzee',
'Zieuwent', 'Zijderveld', 'Zijdewind', 'Zijldijk', 'Zoelen',
'Zoelmond', 'Zoetermeer', 'Zoeterwoude', 'Zonnemaire', 'Zorgvlied',
'Zoutelande', 'Zoutkamp', 'Zuid-Beijerland', 'Zuid-Scharwoude',
'Zuidbroek', 'Zuiddorpe', 'Zuidermeer', 'Zuiderwoude', 'Zuidhorn',
'Zuidlaarderveen', 'Zuidland', 'Zuidlaren', 'Zuidoostbeemster',
'Zuidschermer', 'Zuidveen', 'Zuidveld', 'Zuidvelde', 'Zuidwolde',
'Zuidzande', 'Zuilichem', 'Zuna', 'Zundert', 'Zurich', 'Zutphen',
'Zuurdijk', 'Zwaag', 'Zwaagdijk-Oost', 'Zwaagdijk-West', 'Zwaanshoek',
'Zwagerbosch', 'Zwammerdam', 'Zwanenburg', 'Zwartebroek', 'Zwartemeer',
'Zwartewaal', 'Zwartsluis', 'Zweeloo', 'Zweins', 'Zwiggelte',
'Zwijndrecht', 'Zwinderen', 'Zwolle', 'de Hoef', 'de Lutte', 'de Wijk',
'de Woude',
)
provinces = (
'Drenthe', 'Flevoland', 'Friesland', 'Gelderland', 'Groningen',
'Limburg', 'Noord-Brabant', 'Noord-Holland', 'Overijssel', 'Utrecht',
'Zeeland', 'Zuid-Holland',
)
street_name_formats = (
'{{first_name}}{{street_suffix}}',
)
street_address_formats = (
'{{street_name}} {{building_number}}',
)
address_formats = (
"{{street_address}}\n{{postcode}}\n{{city}}",
)
def province(self):
return self.random_element(self.provinces)
def city(self):
return self.random_element(self.cities)
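# A minimal usage sketch, assuming the faker package is installed (the
# 'nl_NL' locale picks up this Provider automatically):
#
#   from faker import Faker
#   fake = Faker('nl_NL')
#   fake.city()       # one of the BAG "woonplaats" names above
#   fake.province()   # one of the twelve provinces
#   fake.address()    # street + building number, postcode and city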
|
frontera/worker/components/batch_generator.py
|
buildfail/frontera
| 1,267 |
137799
|
<gh_stars>1000+
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import threading
from time import asctime, time
from collections import defaultdict
from logging import DEBUG
from frontera.exceptions import NotConfigured
from frontera.utils.url import parse_domain_from_url_fast
from . import DBWorkerThreadComponent
class BatchGenerator(DBWorkerThreadComponent):
"""Component to get data from backend and send it to spider feed log."""
NAME = 'batchgen'
def __init__(self, worker, settings, stop_event,
no_batches=False, partitions=None, **kwargs):
super(BatchGenerator, self).__init__(worker, settings, stop_event, **kwargs)
if no_batches:
raise NotConfigured('BatchGenerator is disabled with --no-batches')
self.run_backoff = settings.get('NEW_BATCH_DELAY')
self.backend = worker.backend
self.spider_feed = worker.message_bus.spider_feed()
self.spider_feed_producer = self.spider_feed.producer()
self.get_key_function = self.get_fingerprint
if settings.get('QUEUE_HOSTNAME_PARTITIONING'):
self.get_key_function = self.get_hostname
self.domains_blacklist = settings.get('DOMAINS_BLACKLIST')
self.max_next_requests = settings.MAX_NEXT_REQUESTS
self.partitions = partitions
        # create an event to disable/enable batch generation via RPC
self.disabled_event = threading.Event()
# domain statistics logging
self.domain_stats = dict([(partition_id, defaultdict(int)) for partition_id in self.partitions])
self.domain_stats_interval = settings.get('DOMAIN_STATS_LOG_INTERVAL')
self.rotate_time = time() + self.domain_stats_interval
def get_ready_partitions(self):
pending_partitions = self.spider_feed.available_partitions()
if not self.partitions:
return pending_partitions
return list(set(pending_partitions) & set(self.partitions))
def run(self):
if self.disabled_event.is_set():
return True
if self.logger.isEnabledFor(DEBUG) and time() > self.rotate_time:
self.rotate_and_log_domain_stats()
partitions = self.get_ready_partitions()
if not partitions:
return True
batch_count = sum(self._handle_partition(partition_id)
for partition_id in partitions)
if not batch_count:
return True
# let's count full batches in the same way as before
self.update_stats(increments={'batches_after_start': 1},
replacements={'last_batch_size': batch_count,
'last_batch_generated': asctime()})
def _handle_partition(self, partition_id):
self.logger.info("Getting new batches for partition %d", partition_id)
count = 0
for request in self.backend.get_next_requests(self.max_next_requests,
partitions=[partition_id]):
if self._is_domain_blacklisted(request):
continue
try:
request.meta[b'jid'] = self.worker.job_id
eo = self.worker._encoder.encode_request(request)
except Exception as e:
self.logger.error("Encoding error, %s, fingerprint: %s, url: %s" %
(e, self.get_fingerprint(request), request.url))
count += 1 # counts as a processed request
continue
try:
self.spider_feed_producer.send(self.get_key_function(request), eo)
except Exception:
self.logger.exception("Sending message error fingerprint: %s, url: %s" %
(self.get_fingerprint(request), request.url))
finally:
count += 1
hostname = self.get_hostname(request)
if self.logger.isEnabledFor(DEBUG):
self.domain_stats[partition_id][hostname] += 1
self.update_stats(increments={'pushed_since_start': count})
return count
def _is_domain_blacklisted(self, request):
if not self.domains_blacklist:
return
if 'domain' in request.meta:
hostname = request.meta['domain'].get('name')
else:
_, hostname, _, _, _, _ = parse_domain_from_url_fast(request.url)
if hostname:
hostname = hostname.lower()
if hostname in self.domains_blacklist:
self.logger.debug("Dropping black-listed hostname, URL %s", request.url)
return True
return False
def close(self):
self.spider_feed_producer.close()
def rotate_and_log_domain_stats(self):
self.logger.debug("Domain statistics of requests pushed to spider feed")
for partition_id, host_stats in sorted(self.domain_stats.items(), key=lambda x: x[0]):
self.logger.debug("PID %d =================================================================", partition_id)
for hostname, count in host_stats.items():
self.logger.debug("%s\t%d", hostname, count)
self.domain_stats[partition_id] = defaultdict(int)
self.rotate_time = time() + self.domain_stats_interval
# --------------------------- Auxiliary tools --------------------------------
def get_fingerprint(self, request):
return request.meta[b'fingerprint']
def get_hostname(self, request):
return request.meta[b'domain'][b'name']
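    # Note: _is_domain_blacklisted lower-cases the parsed hostname before the
    # lookup, so the DOMAINS_BLACKLIST setting should contain lower-case
    # names, e.g. DOMAINS_BLACKLIST = {'example.com', 'ads.example.org'}.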
|
Tests/compat/sbs_exceptions/while_loop.py
|
cwensley/ironpython2
| 1,078 |
137804
|
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from common import runtests
from .shared import while_loop_maker
from .shared import setGenerator, setKnownFailures, test_exceptions
setGenerator(while_loop_maker)
'''
def test8553():
global log
log+="preloop"
whilevar1_12755 = 0
while whilevar1_12755 < 3:
whilevar1_12755 += 1
log+="inloop"
log+="predefine"
def func2_12756():
global log
try:
log+="try"
log+="break"
break
except:
log+="except"
log+=dump_exc_info()
log+="pass"
pass
func2_12756()
##
same## <type 'exceptions.SystemError'>
same## <type 'exceptions.SyntaxError'>
same## preloopinlooppredefinetrybreak
'''
setKnownFailures([ #IP emits SyntaxError...CPy emits SystemError. Known
#incompatibility. See the docstring above for an example
#of this incompat.
8553, 8554, 8555, 8556, 8656, 8657, 8658, 8697, 8698, 8699,
8738, 8739, 8740,
])
runtests(test_exceptions)
|
support/compute-powers.py
|
ldanko/fmt
| 13,148 |
137832
|
<gh_stars>1000+
#!/usr/bin/env python
# Compute 10 ** exp with exp in the range [min_exponent, max_exponent] and print
# normalized (with most-significant bit equal to 1) significands in hexadecimal.
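# For example, the entry for exp = 4 (10**4 = 10000) is the significand
# 0x9c40000000000000 with binary exponent -50, since
# 0x9c40000000000000 * 2**-50 == 10000.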
from __future__ import print_function
min_exponent = -348
max_exponent = 340
step = 8
significand_size = 64
exp_offset = 2000
class fp:
pass
powers = []
for i, exp in enumerate(range(min_exponent, max_exponent + 1, step)):
result = fp()
n = 10 ** exp if exp >= 0 else 2 ** exp_offset / 10 ** -exp
k = significand_size + 1
# Convert to binary and round.
binary = '{:b}'.format(n)
result.f = (int('{:0<{}}'.format(binary[:k], k), 2) + 1) / 2
result.e = len(binary) - (exp_offset if exp < 0 else 0) - significand_size
powers.append(result)
# Sanity check.
exp_offset10 = 400
actual = result.f * 10 ** exp_offset10
if result.e > 0:
actual *= 2 ** result.e
else:
for j in range(-result.e):
actual /= 2
expected = 10 ** (exp_offset10 + exp)
precision = len('{}'.format(expected)) - len('{}'.format(actual - expected))
if precision < 19:
print('low precision:', precision)
exit(1)
print('Significands:', end='')
for i, fp in enumerate(powers):
if i % 3 == 0:
print(end='\n ')
print(' {:0<#16x}'.format(fp.f, ), end=',')
print('\n\nExponents:', end='')
for i, fp in enumerate(powers):
if i % 11 == 0:
print(end='\n ')
print(' {:5}'.format(fp.e), end=',')
print('\n\nMax exponent difference:',
max([x.e - powers[i - 1].e for i, x in enumerate(powers)][1:]))
|
test/python/test_push.py
|
yiranwang52/makisu
| 2,355 |
137876
|
<reponame>yiranwang52/makisu
import os
import subprocess
import tempfile
from .utils import new_image_name, docker_save_image, makisu_push_image, docker_run_image
def test_push_simple(registry1, registry2, storage_dir, tmpdir):
new_image = new_image_name()
replica_image = new_image_name()
    _, image_tar_path = tempfile.mkstemp(dir='/tmp')  # TODO: prevent leaking the temp file if the test fails.
docker_save_image('busybox:latest', image_tar_path)
makisu_push_image(
new_image, image_tar_path,
registry=registry1.addr,
replicas=[registry2.addr + '/' + replica_image],
registry_cfg={"*": {"*": {"security": {"tls": {"client": {"disabled": True}}}}}})
code, err = docker_run_image(registry1.addr, new_image)
assert code == 0, err
code, err = docker_run_image(registry2.addr, replica_image)
assert code == 0, err
|
hedera-node/forensics/start-investigation.py
|
tinker-michaelj/hedera-services
| 164 |
137880
|
import os
import re
import sys
import shutil
import filecmp
from functools import reduce
fcm_types = ['accounts', 'storage', 'topics']
avail_nodes = []
dumps_root_dir = ''
investigation_name = 'iss'
rounds_avail = {}
account_fcm_pattern = re.compile(r'accounts-round(\d+)[.]fcm')
first_round_post_iss = 0
def prepare_env():
global avail_nodes, dumps_root_dir, investigation_name
    if len(sys.argv) < 2:
        print('USAGE: python3 {} '.format(sys.argv[0]) +
              '<dumps-root-dir> [<investigation-name>]')
        sys.exit(1)
    dumps_root_dir = sys.argv[1]
    if len(sys.argv) > 2:
        investigation_name = sys.argv[2]
avail_nodes = [n for n in next(os.walk(dumps_root_dir))[1]]
if not os.path.exists(os.path.join('.', investigation_name)):
os.mkdir(investigation_name)
for node in avail_nodes:
if not os.path.exists(
os.path.join('.', investigation_name, node)):
os.mkdir(os.path.join(investigation_name, node))
def load_rounds_avail():
for node in avail_nodes:
rounds_dir = os.path.join(dumps_root_dir, node)
rounds = set([num_from(fcm) for fcm in next(os.walk(rounds_dir))[2]
if re.match(account_fcm_pattern, fcm)])
rounds_avail[node] = rounds
def pick_first_round():
global first_round_post_iss, rounds_avail
reducer = lambda x, y: x.intersection(y)
first_round_post_iss = min(reduce(reducer, rounds_avail.values()))
def num_from(accounts_fcm):
m = re.match(account_fcm_pattern, accounts_fcm)
return int(m.group(1))
def copy_round_fcms():
for node in avail_nodes:
for fcm_type in fcm_types:
f = fcm_file(fcm_type)
shutil.copyfile(
os.path.join(dumps_root_dir, node, f),
os.path.join('.', investigation_name, node, f))
def diff_matrix(fcm_type, f):
field_width = max(map(len, avail_nodes)) + 1
write_and_print('\n' + ''.join('-' for _ in range(len(fcm_type))), f)
write_and_print(fcm_type.upper(), f)
write_and_print(''.join('-' for _ in range(len(fcm_type))), f)
write_and_print('{:{w}}'.format('', w=field_width) +
''.join(['{:{w}}'.format(node, w=field_width)
for node in avail_nodes]), f)
blank = '{:{w}}'.format('', w=field_width)
for i, node in enumerate(avail_nodes):
l = ['{:<{w}}'.format(node, w=field_width)]
for j, other in enumerate(avail_nodes):
if j < i:
l.append(blank)
else:
answer = 'X' if differ(node, other, fcm_type) else '.'
l.append('{:{w}}'.format(answer, w=field_width))
line = ''.join(l)
write_and_print(line, f)
def write_and_print(s, f):
print(s)
f.write(s + '\n')
def differ(node1, node2, fcm_type):
fcm1, fcm2 = fcm_path(node1, fcm_type), fcm_path(node2, fcm_type)
return not filecmp.cmp(fcm1, fcm2)
def fcm_file(fcm_type):
return '{}-round{}.fcm'.format(fcm_type, first_round_post_iss)
def fcm_path(node, fcm_type):
return os.path.join('.', investigation_name, node, fcm_file(fcm_type))
def write_list_literals():
p = os.path.join('.', investigation_name, 'fcm-paths.excerpt')
with open(p, 'w') as f:
for fcm_type in fcm_types:
f.write(' final List<String> {}Locs = List.of(\n'.format(
fcm_type))
for i, node in enumerate(avail_nodes):
fq = os.path.join(
os.path.abspath('.'),
investigation_name, node, fcm_file(fcm_type))
opt_comma = '' if (i == len(avail_nodes) - 1) else ','
f.write(' "{}"{}\n'.format(fq, opt_comma))
f.write(' );\n')
if __name__ == '__main__':
prepare_env()
load_rounds_avail()
pick_first_round()
print('\nRound {} is first available for all nodes.'.format(
first_round_post_iss) + ' The dumped FCMs differ as below.')
copy_round_fcms()
p = os.path.join('.', investigation_name, 'fcm-diffs.txt')
with open(p, 'w') as f:
for fcm_type in fcm_types:
diff_matrix(fcm_type, f)
write_list_literals()
|
examples/aiohttp_fetch.py
|
zerotypic/asyncqt
| 158 |
137882
|
import sys
import asyncio
import aiohttp
from asyncqt import QEventLoop, asyncSlot, asyncClose
# from PyQt5.QtWidgets import (
from PySide2.QtWidgets import (
QApplication, QWidget, QLabel, QLineEdit, QTextEdit, QPushButton,
QVBoxLayout)
class MainWindow(QWidget):
"""Main window."""
_DEF_URL = 'https://jsonplaceholder.typicode.com/todos/1'
"""str: Default URL."""
_SESSION_TIMEOUT = 1.
"""float: Session timeout."""
def __init__(self):
super().__init__()
self.setLayout(QVBoxLayout())
self.lblStatus = QLabel('Idle', self)
self.layout().addWidget(self.lblStatus)
self.editUrl = QLineEdit(self._DEF_URL, self)
self.layout().addWidget(self.editUrl)
self.editResponse = QTextEdit('', self)
self.layout().addWidget(self.editResponse)
self.btnFetch = QPushButton('Fetch', self)
self.btnFetch.clicked.connect(self.on_btnFetch_clicked)
self.layout().addWidget(self.btnFetch)
self.session = aiohttp.ClientSession(
loop=asyncio.get_event_loop(),
timeout=aiohttp.ClientTimeout(total=self._SESSION_TIMEOUT))
@asyncClose
async def closeEvent(self, event):
await self.session.close()
@asyncSlot()
async def on_btnFetch_clicked(self):
self.btnFetch.setEnabled(False)
self.lblStatus.setText('Fetching...')
try:
async with self.session.get(self.editUrl.text()) as r:
self.editResponse.setText(await r.text())
except Exception as exc:
self.lblStatus.setText('Error: {}'.format(exc))
else:
self.lblStatus.setText('Finished!')
finally:
self.btnFetch.setEnabled(True)
if __name__ == '__main__':
app = QApplication(sys.argv)
loop = QEventLoop(app)
asyncio.set_event_loop(loop)
mainWindow = MainWindow()
mainWindow.show()
with loop:
sys.exit(loop.run_forever())
|
spacy/tests/lang/ja/test_lemmatization.py
|
g4brielvs/spaCy
| 22,040 |
137885
|
<filename>spacy/tests/lang/ja/test_lemmatization.py
import pytest
@pytest.mark.parametrize(
"word,lemma",
[("新しく", "新しい"), ("赤く", "赤い"), ("すごく", "すごい"), ("いただきました", "いただく"), ("なった", "なる")],
)
def test_ja_lemmatizer_assigns(ja_tokenizer, word, lemma):
test_lemma = ja_tokenizer(word)[0].lemma_
assert test_lemma == lemma
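# Note: the ja_tokenizer fixture is provided by the shared test conftest, and
# running this test needs a Japanese tokenizer backend (SudachiPy in recent
# spaCy versions) to be installed.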
|
localflavor/au/forms.py
|
Malomalsky/django-localflavor
| 619 |
137905
|
"""Australian-specific Form helpers."""
from django.forms.fields import CharField, RegexField, Select
from django.utils.translation import gettext_lazy as _
from .au_states import STATE_CHOICES
from .validators import AUBusinessNumberFieldValidator, AUCompanyNumberFieldValidator, AUTaxFileNumberFieldValidator
class AUPostCodeField(RegexField):
"""
Australian post code field.
Assumed to be 4 digits.
    Northern Territory 3-digit postcodes should have a leading zero.
"""
default_error_messages = {
'invalid': _('Enter a 4 digit postcode.'),
}
def __init__(self, max_length=4, **kwargs):
super().__init__(r'^\d{4}$', max_length=max_length, **kwargs)
class AUStateSelect(Select):
"""A Select widget that uses a list of Australian states/territories as its choices."""
def __init__(self, attrs=None):
super().__init__(attrs, choices=STATE_CHOICES)
class AUBusinessNumberField(CharField):
"""
A form field that validates input as an Australian Business Number (ABN).
.. versionadded:: 1.3
.. versionchanged:: 1.4
"""
default_validators = [AUBusinessNumberFieldValidator()]
def to_python(self, value):
value = super().to_python(value)
if value in self.empty_values:
return self.empty_value
return value.upper().replace(' ', '')
def prepare_value(self, value):
"""Format the value for display."""
if value is None:
return value
spaceless = ''.join(value.split())
return '{} {} {} {}'.format(spaceless[:2], spaceless[2:5], spaceless[5:8], spaceless[8:])
class AUCompanyNumberField(CharField):
"""
A form field that validates input as an Australian Company Number (ACN).
.. versionadded:: 1.5
"""
default_validators = [AUCompanyNumberFieldValidator()]
def to_python(self, value):
value = super().to_python(value)
if value in self.empty_values:
return self.empty_value
return value.upper().replace(' ', '')
def prepare_value(self, value):
"""Format the value for display."""
if value is None:
return value
spaceless = ''.join(value.split())
return '{} {} {}'.format(spaceless[:3], spaceless[3:6], spaceless[6:])
class AUTaxFileNumberField(CharField):
"""
A form field that validates input as an Australian Tax File Number (TFN).
.. versionadded:: 1.4
"""
default_validators = [AUTaxFileNumberFieldValidator()]
def prepare_value(self, value):
"""Format the value for display."""
if value is None:
return value
spaceless = ''.join(value.split())
return '{} {} {}'.format(spaceless[:3], spaceless[3:6], spaceless[6:])
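# A minimal usage sketch (the numbers are purely illustrative):
#
#   abn_field = AUBusinessNumberField()
#   abn_field.to_python(' 51 824 753 556 ')  # -> '51824753556'
#   abn_field.prepare_value('51824753556')   # -> '51 824 753 556'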
|
rotkehlchen/chain/ethereum/modules/gitcoin/__init__.py
|
rotkehlchenio/rotkehlchen
| 137 |
137924
|
<reponame>rotkehlchenio/rotkehlchen
from .accountant import GitcoinAccountant # noqa: F401
from .decoder import GitcoinDecoder # noqa: F401
|
knifey.py
|
bogianoithonda/tensoflow_tutorial
| 9,714 |
137953
|
########################################################################
#
# Functions for downloading the Knifey-Spoony data-set from the internet
# and loading it into memory. Note that this only loads the file-names
# for the images in the data-set and does not load the actual images.
#
# Implemented in Python 3.5
#
########################################################################
#
# This file is part of the TensorFlow Tutorials available at:
#
# https://github.com/Hvass-Labs/TensorFlow-Tutorials
#
# Published under the MIT License. See the file LICENSE for details.
#
# Copyright 2016 by <NAME>
#
########################################################################
from dataset import load_cached
import download
import os
########################################################################
# Directory where you want to download and save the data-set.
# Set this before you start calling any of the functions below.
data_dir = "data/knifey-spoony/"
# Directory for the training-set after copying the files using copy_files().
train_dir = os.path.join(data_dir, "train/")
# Directory for the test-set after copying the files using copy_files().
test_dir = os.path.join(data_dir, "test/")
# URL for the data-set on the internet.
data_url = "https://github.com/Hvass-Labs/knifey-spoony/raw/master/knifey-spoony.tar.gz"
########################################################################
# Various constants for the size of the images.
# Use these constants in your own program.
# Width and height of each image.
img_size = 200
# Number of channels in each image, 3 channels: Red, Green, Blue.
num_channels = 3
# Shape of the numpy-array for an image.
img_shape = [img_size, img_size, num_channels]
# Length of an image when flattened to a 1-dim array.
img_size_flat = img_size * img_size * num_channels
# Number of classes.
num_classes = 3
########################################################################
# Public functions that you may call to download the data-set from
# the internet and load the data into memory.
def maybe_download_and_extract():
"""
Download and extract the Knifey-Spoony data-set if it doesn't already exist
in data_dir (set this variable first to the desired directory).
"""
download.maybe_download_and_extract(url=data_url, download_dir=data_dir)
def load():
"""
Load the Knifey-Spoony data-set into memory.
This uses a cache-file which is reloaded if it already exists,
otherwise the Knifey-Spoony data-set is created and saved to
the cache-file. The reason for using a cache-file is that it
    ensures the files are ordered consistently each time the data-set
is loaded. This is important when the data-set is used in
combination with Transfer Learning as is done in Tutorial #09.
:return:
A DataSet-object for the Knifey-Spoony data-set.
"""
# Path for the cache-file.
cache_path = os.path.join(data_dir, "knifey-spoony.pkl")
# If the DataSet-object already exists in a cache-file
# then load it, otherwise create a new object and save
# it to the cache-file so it can be loaded the next time.
dataset = load_cached(cache_path=cache_path,
in_dir=data_dir)
return dataset
def copy_files():
"""
Copy all the files in the training-set to train_dir
and copy all the files in the test-set to test_dir.
This creates the directories if they don't already exist,
and it overwrites the images if they already exist.
The images are originally stored in a directory-structure
that is incompatible with e.g. the Keras API. This function
copies the files to a dir-structure that works with e.g. Keras.
"""
# Load the Knifey-Spoony dataset.
# This is very fast as it only gathers lists of the files
# and does not actually load the images into memory.
dataset = load()
# Copy the files to separate training- and test-dirs.
dataset.copy_files(train_dir=train_dir, test_dir=test_dir)
########################################################################
if __name__ == '__main__':
# Download and extract the data-set if it doesn't already exist.
maybe_download_and_extract()
# Load the data-set.
dataset = load()
# Get the file-paths for the images and their associated class-numbers
# and class-labels. This is for the training-set.
image_paths_train, cls_train, labels_train = dataset.get_training_set()
# Get the file-paths for the images and their associated class-numbers
# and class-labels. This is for the test-set.
image_paths_test, cls_test, labels_test = dataset.get_test_set()
# Check if the training-set looks OK.
# Print some of the file-paths for the training-set.
for path in image_paths_train[0:5]:
print(path)
# Print the associated class-numbers.
print(cls_train[0:5])
# Print the class-numbers as one-hot encoded arrays.
print(labels_train[0:5])
# Check if the test-set looks OK.
# Print some of the file-paths for the test-set.
for path in image_paths_test[0:5]:
print(path)
# Print the associated class-numbers.
print(cls_test[0:5])
# Print the class-numbers as one-hot encoded arrays.
print(labels_test[0:5])
########################################################################
|
src/genie/libs/parser/iosxr/tests/ShowPlacementProgramAll/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
| 204 |
137957
|
expected_output = {
"program": {
"auto_ip_ring": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1156",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"bfd": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1158",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"bgp": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1051",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
},
"test": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "Group_10_bgp2",
"jid": "1052",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
},
"test1": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "Group_5_bgp3",
"jid": "1053",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
},
"test2": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "Group_5_bgp4",
"jid": "1054",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
},
}
},
"bgp_epe": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1159",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"bpm": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1066",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"bundlemgr_distrib": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1157",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"domain_services": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1160",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"es_acl_mgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1169",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"eth_gl_cfg": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1151",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ethernet_stats_controller_edm": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1161",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ftp_fs": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1162",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"icpe_satmgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1163",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"igmp": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1208",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"intf_mgbl": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1143",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_connected": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1152",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_local": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1153",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_mfwd_ma": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1204",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_mpa": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1149",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_rib": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1146",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_rump": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1167",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv4_static": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1043",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_connected": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v6-routing",
"jid": "1154",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_local": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v6-routing",
"jid": "1155",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_mfwd_ma": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1205",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_mpa": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1150",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_rib": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v6-routing",
"jid": "1147",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ipv6_rump": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v6-routing",
"jid": "1168",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"l2tp_mgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1176",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"l2vpn_mgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1175",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"mld": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1209",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"mpls_ldp": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1199",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"mpls_static": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1142",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"mrib": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1206",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"mrib6": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1207",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"netconf": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1189",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"nfmgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1145",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ospf": {
"instance": {
"1": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1018",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ospf_uv": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1114",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"pbr_ma": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1171",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"pim": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1210",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"pim6": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "mcast-routing",
"jid": "1211",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"policy_repository": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1148",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"python_process_manager": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1164",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"qos_ma": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1172",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"rcp_fs": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1165",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"rt_check_mgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "v4-routing",
"jid": "1170",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"schema_server": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1177",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"snmppingd": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1195",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"spa_cfg_hlpr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1130",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ssh_conf_verifier": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1183",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"ssh_server": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1184",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"statsd_manager_g": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "netmgmt",
"jid": "1144",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"telemetry_encoder": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1194",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"tty_verifyd": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1166",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"vservice_mgr": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1173",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"wanphy_proc": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1178",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
"xtc_agent": {
"instance": {
"default": {
"active": "0/RSP1/CPU0",
"active_state": "RUNNING",
"group": "central-services",
"jid": "1174",
"standby": "0/RSP0/CPU0",
"standby_state": "RUNNING",
}
}
},
}
}
|
GramAddict/core/bot_flow.py
|
GramAddict/bot
| 323 |
138022
|
import logging
import random
from datetime import datetime, timedelta
from sys import exit
from time import sleep
from colorama import Fore, Style
from GramAddict.core.config import Config
from GramAddict.core.device_facade import create_device, get_device_info
from GramAddict.core.filter import load_config as load_filter
from GramAddict.core.interaction import load_config as load_interaction
from GramAddict.core.log import (
configure_logger,
is_log_file_updated,
update_log_file_name,
)
from GramAddict.core.navigation import check_if_english
from GramAddict.core.persistent_list import PersistentList
from GramAddict.core.report import print_full_report
from GramAddict.core.session_state import SessionState, SessionStateEncoder
from GramAddict.core.storage import Storage
from GramAddict.core.utils import (
ask_for_a_donation,
can_repeat,
check_adb_connection,
check_if_updated,
close_instagram,
config_examples,
get_instagram_version,
get_value,
kill_atx_agent,
)
from GramAddict.core.utils import load_config as load_utils
from GramAddict.core.utils import (
move_usernames_to_accounts,
open_instagram,
pre_post_script,
print_telegram_reports,
save_crash,
set_time_delta,
stop_bot,
wait_for_next_session,
)
from GramAddict.core.views import AccountView, ProfileView, SearchView, TabBarView
from GramAddict.core.views import load_config as load_views
TESTED_IG_VERSION = "214.0.0.27.120"
def start_bot(**kwargs):
# Logging initialization
logger = logging.getLogger(__name__)
# Pre-Load Config
configs = Config(first_run=True, **kwargs)
configure_logger(configs.debug, configs.username)
if not kwargs:
if "--config" not in configs.args:
logger.info(
"We strongly recommend to use a config.yml file. Follow these links for more details: https://docs.gramaddict.org/#/configuration and https://github.com/GramAddict/bot/tree/master/config-examples",
extra={"color": f"{Fore.GREEN}{Style.BRIGHT}"},
)
sleep(3)
# Config-example hint
config_examples()
# Check for updates
check_if_updated()
# Move username folders to a main directory -> accounts
if "--move-folders-in-accounts" in configs.args:
move_usernames_to_accounts()
# Global Variables
sessions = PersistentList("sessions", SessionStateEncoder)
# Load Config
configs.load_plugins()
configs.parse_args()
# Some plugins need config values without being passed
# through. Because we do a weird config/argparse hybrid,
# we need to load the configs in a weird way
load_filter(configs)
load_interaction(configs)
load_utils(configs)
load_views(configs)
if not configs.args or not check_adb_connection():
return
if len(configs.enabled) < 1:
logger.error(
"You have to specify one of the actions: " + ", ".join(configs.actions)
)
return
device = create_device(configs.device_id)
session_state = None
if str(configs.args.total_sessions) != "-1":
total_sessions = get_value(configs.args.total_sessions, None, -1)
else:
total_sessions = -1
# init
analytics_at_end = False
telegram_reports_at_end = False
followers_now = None
following_now = None
while True:
set_time_delta(configs.args)
inside_working_hours, time_left = SessionState.inside_working_hours(
configs.args.working_hours, configs.args.time_delta_session
)
if not inside_working_hours:
wait_for_next_session(
time_left, session_state, sessions, device, configs.args.screen_record
)
pre_post_script(path=configs.args.pre_script)
get_device_info(device)
session_state = SessionState(configs)
session_state.set_limits_session(configs.args)
sessions.append(session_state)
device.wake_up()
logger.info(
"-------- START: "
+ str(session_state.startTime.strftime("%H:%M:%S - %Y/%m/%d"))
+ " --------",
extra={"color": f"{Style.BRIGHT}{Fore.YELLOW}"},
)
if not device.get_info()["screenOn"]:
device.press_power()
if device.is_screen_locked():
device.unlock()
if device.is_screen_locked():
logger.error(
"Can't unlock your screen. There may be a passcode on it. If you would like your screen to be turned on and unlocked automatically, please remove the passcode."
)
exit(0)
logger.info("Device screen ON and unlocked.")
if open_instagram(device, configs.args.screen_record, configs.args.close_apps):
try:
running_ig_version = get_instagram_version()
logger.info(f"Instagram version: {running_ig_version}")
                if tuple(map(int, running_ig_version.split("."))) > tuple(
                    map(int, TESTED_IG_VERSION.split("."))
                ):
logger.info(
                        f"You have a newer version of IG than the one tested! (Tested version: {TESTED_IG_VERSION})",
extra={"color": f"{Style.BRIGHT}"},
)
except Exception as e:
logger.error(f"Error retrieving the IG version. Exception: {e}")
SearchView(device)._close_keyboard()
else:
break
try:
profileView = check_if_english(device)
if configs.args.username is not None:
success = AccountView(device).changeToUsername(configs.args.username)
if not success:
logger.error(
f"Not able to change to {configs.args.username}, abort!"
)
save_crash(device)
device.back()
break
AccountView(device).refresh_account()
(
session_state.my_username,
session_state.my_posts_count,
session_state.my_followers_count,
session_state.my_following_count,
) = profileView.getProfileInfo()
except Exception as e:
logger.error(f"Exception: {e}")
save_crash(device)
break
if (
session_state.my_username is None
or session_state.my_posts_count is None
or session_state.my_followers_count is None
or session_state.my_following_count is None
):
logger.critical(
"Could not get one of the following from your profile: username, # of posts, # of followers, # of followings. This is typically due to a soft ban. Review the crash screenshot to see if this is the case."
)
logger.critical(
f"Username: {session_state.my_username}, Posts: {session_state.my_posts_count}, Followers: {session_state.my_followers_count}, Following: {session_state.my_following_count}"
)
save_crash(device)
exit(1)
if not is_log_file_updated():
try:
update_log_file_name(session_state.my_username)
except Exception as e:
logger.error(
f"Failed to update log file name. Will continue anyway. {e}"
)
report_string = f"Hello, @{session_state.my_username}! You have {session_state.my_followers_count} followers and {session_state.my_following_count} followings so far."
logger.info(report_string, extra={"color": f"{Style.BRIGHT}"})
if configs.args.repeat:
logger.info(
f"You have {total_sessions + 1 - len(sessions) if total_sessions > 0 else 'infinite'} session(s) left. You can stop the bot by pressing CTRL+C in console.",
extra={"color": f"{Style.BRIGHT}{Fore.YELLOW}"},
)
sleep(3)
storage = Storage(session_state.my_username)
if configs.args.shuffle_jobs:
jobs_list = random.sample(configs.enabled, len(configs.enabled))
else:
jobs_list = configs.enabled
if "analytics" in jobs_list:
jobs_list.remove("analytics")
if configs.args.analytics:
analytics_at_end = True
if "telegram-reports" in jobs_list:
jobs_list.remove("telegram-reports")
if configs.args.telegram_reports:
telegram_reports_at_end = True
for plugin in jobs_list:
inside_working_hours, time_left = SessionState.inside_working_hours(
configs.args.working_hours, configs.args.time_delta_session
)
if not inside_working_hours:
logger.info(
"Outside of working hours. Ending session.",
extra={"color": f"{Fore.CYAN}"},
)
break
if not session_state.check_limit(
configs.args, limit_type=session_state.Limit.ALL, output=True
):
logger.info(
f"Current job: {plugin}",
extra={"color": f"{Style.BRIGHT}{Fore.BLUE}"},
)
if configs.args.scrape_to_file is not None:
logger.warning("You're in scraping mode!")
if ProfileView(device).getUsername() != session_state.my_username:
logger.debug("Not in your main profile.")
TabBarView(device).navigateToProfile()
configs.actions[plugin].run(device, configs, storage, sessions, plugin)
else:
logger.info(
"At last one of these limits has been reached: interactions/successful/follower/likes or scraped. Ending session.",
extra={"color": f"{Fore.CYAN}"},
)
break
# save the session in sessions.json
session_state.finishTime = datetime.now()
sessions.persist(directory=session_state.my_username)
# print reports
if telegram_reports_at_end:
logger.info("Going back to your profile..")
ProfileView(device)._click_on_avatar()
if ProfileView(device).getFollowingCount() is None:
ProfileView(device)._click_on_avatar()
AccountView(device).refresh_account()
(
_,
_,
followers_now,
following_now,
) = ProfileView(device).getProfileInfo()
if analytics_at_end:
configs.actions["analytics"].run(
device, configs, storage, sessions, "analytics"
)
# turn off bot
close_instagram(device, configs.args.screen_record)
if configs.args.screen_sleep:
device.screen_off()
logger.info("Screen turned off for sleeping time.")
kill_atx_agent(device)
logger.info(
"-------- FINISH: "
+ str(session_state.finishTime.strftime("%H:%M:%S - %Y/%m/%d"))
+ " --------",
extra={"color": f"{Style.BRIGHT}{Fore.YELLOW}"},
)
pre_post_script(pre=False, path=configs.args.post_script)
if configs.args.repeat and can_repeat(len(sessions), total_sessions):
print_full_report(sessions, configs.args.scrape_to_file)
inside_working_hours, time_left = SessionState.inside_working_hours(
configs.args.working_hours, configs.args.time_delta_session
)
if inside_working_hours:
time_left = (
get_value(configs.args.repeat, "Sleep for {} minutes.", 180) * 60
)
print_telegram_reports(
configs,
telegram_reports_at_end,
followers_now,
following_now,
time_left,
)
logger.info(
f'Next session will start at: {(datetime.now() + timedelta(seconds=time_left)).strftime("%H:%M:%S (%Y/%m/%d)")}.'
)
try:
sleep(time_left)
except KeyboardInterrupt:
stop_bot(
device,
sessions,
session_state,
configs.args.screen_record,
was_sleeping=True,
)
else:
print_telegram_reports(
configs,
telegram_reports_at_end,
followers_now,
following_now,
time_left.total_seconds(),
)
wait_for_next_session(
time_left,
session_state,
sessions,
device,
configs.args.screen_record,
)
else:
break
print_telegram_reports(
configs,
telegram_reports_at_end,
followers_now,
following_now,
)
print_full_report(sessions, configs.args.scrape_to_file)
ask_for_a_donation()
|
src/visitpy/visit_flow/flow/src/filters/pyocl_kernels.py
|
visit-dav/vis
| 226 |
138051
|
# Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
# Project developers. See the top-level LICENSE file for dates and other
# details. No copyright assignment is required to contribute to VisIt.
"""
file: pyocl_kernels.py
author: <NAME> <<EMAIL>>
created: 9/6/2012
description:
"""
# Guarded import of pyopencl
found_pyopencl = False
try:
import numpy as npy
import pyopencl as cl
found_pyopencl = True
except ImportError:
pass
Kernels = {
### add
"kadd" : {"name": "add",
"input_ports": ["in_a","in_b"],
"src":"""
float kadd(const float a,const float b)
{return a + b;}
"""},
### sub
"ksub" : {"name": "sub",
"input_ports": ["in_a","in_b"],
"src":"""
float ksub(const float a,const float b)
{return a - b;}
"""},
### mult
"kmult": {"name": "mult",
"input_ports": ["in_a","in_b"],
"src":"""
float kmult(const float a,const float b)
{return a * b;}
"""},
### div
"kdiv": {"name": "div",
"input_ports": ["in_a","in_b"],
"src":"""
float kdiv(const float a,const float b)
{return a / b;}
"""},
### mod
"kmod": {"name": "mod",
"input_ports": ["in_a","in_b"],
"src":"""
float kmod(const float a, const float b)
{return fmod(a, b);}
"""},
### cos
"kcos" : {"name": "cos",
"input_ports": ["in"],
"src":"""
float kcos(const float a)
{return cos(a);}
"""},
### sin
"ksin": {"name": "sin",
"input_ports": ["in"],
"src":"""
float ksin(const float a)
{return sin(a);}
"""},
    ### tan
"ktan": {"name": "tan",
"input_ports": ["in"],
"src": """
float ktan(const float a)
{return tan(a);}
"""},
    ### ceil
"kciel": {"name": "ciel",
"input_ports": ["in"],
"src": """
float kceil(const float a)
{return ceil(a);}
"""},
### floor
"kfloor": {"name": "floor",
"input_ports": ["in"],
"src":"""
float kfloor(const float a)
{return floor(a);}
"""},
### abs
"kabs": {"name": "abs",
"input_ports": ["in"],
"src":"""
float kabs(const float a)
{return abs(a);}
"""},
### log10
"klog10": {"name": "log10",
"input_ports": ["in"],
"src":"""
float klog10(const float a)
{return log10(a);}
"""},
### log
"klog": {"name": "log",
"input_ports": ["in"],
"src":"""
float klog(const float a)
{return log(a);}
"""},
### exp
"kexp": {"name": "exp",
"input_ports": ["in"],
"src":"""
float kexp(const float a)
{return exp(a);}
"""},
### pow
"kpow": {"name": "pow",
"input_ports": ["in"],
"src":"""
float kpow(const float a, const float b)
{return pow(a, b);}
"""},
### id
"kid": {"name": "id",
"input_ports": ["in"],
"src":"""
float kid(const float a)
{return a;}
"""},
### square
"ksquare": {"name": "square",
"input_ports": ["in"],
"src": """
float ksquare(const float a)
{return a*a;}
"""},
### sqrt
"ksqrt": {"name": "sqrt",
"input_ports": ["in"],
"src":"""
float ksqrt(const float a)
{return sqrt(a);}
"""},
### curl3d
"curl3d": {"name": "sqrt",
"input_ports": ["dfx","dfy","dfx"],
"in_types": ["direct",
"direct",
"direct"],
"out_type": "float4",
"src":"""
float4 kcurl3d(__global const float *dfx,
__global const float *dfy,
               __global const float *dfz)
{
int gid = get_global_id(0);
float dfzdy = dfz[gid*3+1];
float dfydz = dfy[gid*3+2];
float dfxdz = dfx[gid*3+2];
float dfzdx = dfz[gid*3];
float dfydx = dfy[gid*3];
float dfxdy = dfx[gid*3+1];
float4 res;
res.x = dfzdy - dfydz;
res.y = dfxdz - dfzdx;
res.z = dfydx - dfxdy;
    return res;
}
"""},
### grad3d
"kgrad3d": {"name": "grad3d",
"input_ports": ["in","dims","x","y","z"],
"in_types": ["direct",
"direct",
"direct",
"direct",
"direct"],
"out_type": "float4",
"src":"""
float4 kgrad3d(__global const float *v,
__global const int *d,
__global const float *x,
__global const float *y,
__global const float *z)
{
int gid = get_global_id(0);
int di = d[0]-1;
int dj = d[1]-1;
int dk = d[2]-1;
int zi = gid % di;
int zj = (gid / di) % dj;
int zk = (gid / di) / dj;
// for rectilinear, we only need 2 points to get dx,dy,dz
int pi0 = zi + zj*(di+1) + zk*(di+1)*(dj+1);
int pi1 = zi + 1 + (zj+1)*(di+1) + (zk+1)*(di+1)*(dj+1);
float vv = v[gid];
float4 p_0 = (float4)(x[pi0],y[pi0],z[pi0],1.0);
float4 p_1 = (float4)(x[pi1],y[pi1],z[pi1],1.0);
float4 dg = p_1 - p_0;
// value
float4 f_0 = (float4)(vv,vv,vv,1.0);
float4 f_1 = (float4)(vv,vv,vv,1.0);
// i bounds
if(zi > 0)
{
f_0.x = v[gid-1];
}
if(zi < (di-1))
{
f_1.x = v[gid+1];
}
// j bounds
if(zj > 0)
{
f_0.y = v[gid-di];
}
if(zj < (dj-1))
{
f_1.y = v[gid+di];
}
// k bounds
if(zk > 0)
{
f_0.z = v[gid-(di*dj)];
}
if(zk < (dk-1))
{
f_1.z = v[gid+(di*dj)];
}
float4 df = (f_1 - f_0) / dg;
// central diff if we aren't on the edges
if( (zi != 0) && (zi != (di-1)))
{
df.x *= .5;
}
// central diff if we aren't on the edges
if( (zj != 0) && (zj != (dj-1)))
{
df.y *= .5;
}
// central diff if we aren't on the edges
if( (zk != 0) && (zk != (dk-1)))
{
df.z *= .5;
}
//return (float4)(1.0,2.0,3.0,0.0);
return df;
}
"""}
}
# fill in set defaults
for k,v in list(Kernels.items()):
if "out_type" not in v:
v["out_type"] = "float"
if "in_types" not in v:
v["in_types"] = [ "fetch" for ipt in v["input_ports"]]
def create_stub(filter,inputs):
# gen stub glue & execute
ident = " "
args_ident = " "
res = filter.kernel_source
res += "\n%s__kernel void kmain(" % ident
ninputs = len(inputs)
for idx in range(ninputs):
if isinstance(inputs[idx],float):
itype = "float"
elif inputs[idx].dtype == npy.int32:
itype = "int "
else:
itype = "float"
iname = "in_%04d" % idx
res += "__global const %s *%s,\n%s " % (itype,iname,args_ident)
res += "__global float *out)\n"
res += "%s{\n" % ident
res += "%s int gid = get_global_id(0);\n" % ident
call_names = []
for idx in range(ninputs):
iname = "in_%04d" % idx
if filter.in_types[idx] == "fetch":
if isinstance(inputs[idx],float):
itype = "float"
elif inputs[idx].dtype == npy.int32:
itype = "int "
else:
itype = "float"
cname = "%s_fetch" % iname
res += "%s %s %s = %s[gid];\n" % (ident,itype,cname,iname)
else:
cname = iname
call_names.append(cname)
call = "k%s(" % filter.filter_type
for cn in call_names:
call += "%s," % cn
call = call[:-1] + ")"
out_dim = None
if filter.out_type == "float":
res += "%s out[gid] = %s;\n" % (ident,call)
elif filter.out_type == "float4":
res += "%s float4 res = %s;\n" % (ident,call)
res += "%s out[gid*3] = res.x;\n" % (ident)
res += "%s out[gid*3+1] = res.y;\n" % (ident)
res += "%s out[gid*3+2] = res.z;\n" % (ident)
out_dim = 3
res += "%s}\n" % ident
return res, out_dim
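# Rough sketch of the output (assumed example, not produced at import time): for
# the "add" kernel with two "fetch"-typed float inputs, create_stub() returns
# OpenCL source roughly like:
#
#   float kadd(const float a,const float b)
#   {return a + b;}
#
#   __kernel void kmain(__global const float *in_0000,
#                       __global const float *in_0001,
#                       __global float *out)
#   {
#       int gid = get_global_id(0);
#       float in_0000_fetch = in_0000[gid];
#       float in_0001_fetch = in_0001[gid];
#       out[gid] = kadd(in_0000_fetch,in_0001_fetch);
#   }
#
# together with out_dim = None, since the "add" kernel's out_type is "float".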
|
timemachines/skatertools/offlinetesting/optimizerandomskater.py
|
iklasky/timemachines
| 253 |
138059
|
<reponame>iklasky/timemachines
from timemachines.skatertools.tuning.hyper import optimal_r
from timemachines.skatertools.data.synthetic import brownian_with_exogenous
from timemachines.skaters.localskaters import SKATERS_R1, SKATERS_R2, SKATERS_R3
import time
import random
from pprint import pprint
from timemachines.skaters.proph.prophparams import PROPHET_META
try:
from humpday.optimizers.alloptimizers import OPTIMIZERS
except ImportError:
raise ImportError('You need to pip install humpday')
def optimize_random_skater():
print('Available optimizers...')
print([o.__name__ for o in OPTIMIZERS])
start_time = time.time()
optimizer = random.choice(OPTIMIZERS)
f = random.choice(SKATERS_R1+SKATERS_R2+SKATERS_R3)
k = random.choice([1,2,3,5,8,13,21])
n_trials = random.choice([15,50])
n_burn = PROPHET_META['n_warm']
n = n_burn+100 # Length of synthetic data
print('Skater is '+str(f.__name__))
print('Using '+str(optimizer.__name__))
r_star, best_val, info = optimal_r(f=f, y=brownian_with_exogenous(n=n),k=k,a=None,
n_trials=n_trials, optimizer=optimizer, n_burn=n_burn)
print("Best hyper-param is " + str(r_star))
print('Took ' + str( (time.time()-start_time)/60 ) + ' minutes.' )
pprint(info)
if __name__=='__main__':
optimize_random_skater()
|
tests/test_attr_access.py
|
pdxjohnny/rpyc
| 238 |
138062
|
import rpyc
import copy
import unittest
from rpyc.utils.server import ThreadedServer
class MyClass(object):
def __add__(self, other):
return self.foo() + str(other)
def foo(self):
return "foo"
def bar(self):
return "bar"
def spam(self):
return "spam"
def _privy(self):
return "privy"
def exposed_foobar(self):
return "Fee Fie Foe Foo"
class YourClass(object):
def lala(self):
return MyClass()
def baba(self):
return "baba"
def gaga(self):
return "gaga"
try:
long
except NameError:
long = int
unicode = str
try:
bytes
except NameError:
bytes = str
class Protector(object):
def __init__(self, safetypes=(int, list, bool, tuple, str, float, long, unicode, bytes)):
self._safetypes = set(safetypes)
self._typereg = {}
def register(self, typ, attrs):
self._typereg[typ] = frozenset(attrs)
def wrap(self, obj):
class Restrictor(object):
def __call__(_, *args, **kwargs):
return self.wrap(obj(*args, **kwargs))
def _rpyc_getattr(_, name):
if type(obj) not in self._safetypes:
attrs = self._typereg.get(type(obj), ())
if name not in attrs:
raise AttributeError(name)
obj2 = getattr(obj, name)
return self.wrap(obj2)
__getattr__ = _rpyc_getattr
return Restrictor()
SVC_RESTRICTED = ["exposed_foobar", "__add__", "_privy", "foo", "bar"]
class MyService(rpyc.Service):
exposed_MyClass = MyClass
def exposed_get_one(self):
return rpyc.restricted(MyClass(), SVC_RESTRICTED)
def exposed_get_two(self):
protector = Protector()
protector.register(MyClass, SVC_RESTRICTED)
protector.register(YourClass, ["lala", "baba"])
return protector.wrap(YourClass())
class TestRestricted(unittest.TestCase):
def setUp(self):
self.server = ThreadedServer(MyService)
self.thd = self.server._start_in_thread()
self.conn = rpyc.connect("localhost", self.server.port)
def tearDown(self):
self.conn.close()
while self.server.clients:
pass
self.server.close()
self.thd.join()
def test_restricted(self):
obj = self.conn.root.get_one()
self.assertEqual(obj.foo(), "foo")
self.assertEqual(obj.bar(), "bar")
self.assertEqual(obj.__add__("bar"), "foobar")
self.assertEqual(obj._privy(), "privy")
self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
self.assertRaises(AttributeError, lambda: obj.spam)
def test_restricted2(self):
self.server.protocol_config = {'allow_public_attrs': False}
obj = self.conn.root.get_one()
self.assertEqual(obj.foo(), "foo")
self.assertEqual(obj.bar(), "bar")
self.assertEqual(obj.__add__("bar"), "foobar")
self.assertEqual(obj._privy(), "privy")
self.assertRaises(AttributeError, lambda: obj.spam)
class TestConfigAllows(unittest.TestCase):
def setUp(self):
self.cfg = self._reset_cfg()
self.server = ThreadedServer(MyService, port=0)
self.thd = self.server._start_in_thread()
self.conn = rpyc.connect("localhost", self.server.port)
def tearDown(self):
self.conn.close()
while self.server.clients:
pass
self.server.close()
self.thd.join()
def _reset_cfg(self):
self.cfg = copy.copy(rpyc.core.protocol.DEFAULT_CONFIG)
return self.cfg
def _get_myclass(self, proto_config):
self.conn.close()
self.server.protocol_config.update(proto_config)
self.conn = rpyc.connect("localhost", self.server.port)
return self.conn.root.MyClass()
def test_default_config(self):
obj = self._get_myclass(self.cfg)
self.assertEqual(obj + 'bar', "foobar")
self.assertEqual(obj.foobar(), "Fee Fie Foe Foo")
self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
self.assertRaises(AttributeError, lambda: obj._privy)
self.assertRaises(AttributeError, lambda: obj.foo)
self.assertRaises(AttributeError, lambda: obj.bar)
self.assertRaises(AttributeError, lambda: obj.spam)
def test_allow_all(self):
self._reset_cfg()
self.cfg['allow_all_attrs'] = True
obj = self._get_myclass(self.cfg)
self.assertEqual(obj + 'bar', "foobar")
self.assertEqual(obj.__add__("bar"), "foobar")
self.assertEqual(obj._privy(), "privy")
self.assertEqual(obj.foobar(), "Fee Fie Foe Foo")
self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
def test_allow_exposed(self):
self._reset_cfg()
self.cfg['allow_exposed_attrs'] = False
try:
self._get_myclass(self.cfg) # returns obj, but ignored
passed = False
except Exception:
passed = True
self.assertEqual(passed, True)
def test_allow_safe_attrs(self):
self._reset_cfg()
self.cfg['allow_safe_attrs'] = False
obj = self._get_myclass(self.cfg)
self.assertEqual(obj.foobar(), "Fee Fie Foe Foo")
self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
self.assertRaises(AttributeError, lambda: obj._privy)
self.assertRaises(AttributeError, lambda: obj + 'bar')
self.assertRaises(AttributeError, lambda: obj.foo)
self.assertRaises(AttributeError, lambda: obj.bar)
self.assertRaises(AttributeError, lambda: obj.spam)
def test_allow_public_attrs(self):
self._reset_cfg()
self.cfg['allow_public_attrs'] = True
obj = self._get_myclass(self.cfg)
self.assertEqual(obj + 'bar', "foobar")
self.assertEqual(obj.foo(), "foo")
self.assertEqual(obj.bar(), "bar")
self.assertEqual(obj.foobar(), "Fee Fie Foe Foo")
self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
self.assertRaises(AttributeError, lambda: obj._privy)
# def test_type_protector(self):
# obj = self.conn.root.get_two()
# assert obj.baba() == "baba"
# try:
# obj.gaga()
# except AttributeError:
# pass
# else:
# assert False, "expected an attribute error!"
# obj2 = obj.lala()
# assert obj2.foo() == "foo"
# assert obj2.spam() == "spam"
# try:
# obj.bar()
# except AttributeError:
# pass
# else:
# assert False, "expected an attribute error!"
#
if __name__ == "__main__":
unittest.main()
|
tests/commands/test__vi_f.py
|
my-personal-forks/Vintageous
| 1,146 |
138065
|
<reponame>my-personal-forks/Vintageous<gh_stars>1000+
from collections import namedtuple
from Vintageous.tests import ViewTest
from Vintageous.vi.utils import modes
test_data = namedtuple('test_data', 'text startRegion findChar mode expectedRegion msg')
NORMAL_CASES = (
test_data('0a23x5', (1, 1), 'x', modes.NORMAL, (4, 4), 'Find ahead'),
test_data('0ax345', (1, 1), 'x', modes.NORMAL, (2, 2), 'Find next'),
test_data('0x2345', (1, 1), 'x', modes.NORMAL, (1, 1), 'Find self'),
test_data('0a2xx5', (1, 1), 'x', modes.NORMAL, (3, 3), 'Find multiple'),
test_data('0x2x45', (1, 1), 'x', modes.NORMAL, (3, 3), 'Find self multiple'),
)
INTERNAL_NORMAL_CASES = (
test_data('0a23x5', (1, 1), 'x', modes.INTERNAL_NORMAL, (1, 5), 'Find ahead'),
test_data('0ax345', (1, 1), 'x', modes.INTERNAL_NORMAL, (1, 3), 'Find next'),
test_data('0x2345', (1, 1), 'x', modes.INTERNAL_NORMAL, (1, 1), 'Find self'),
test_data('0a2xx5', (1, 1), 'x', modes.INTERNAL_NORMAL, (1, 4), 'Find multiple'),
test_data('0x2x45', (1, 1), 'x', modes.INTERNAL_NORMAL, (1, 4), 'Find self multiple'),
)
VISUAL_MULTI_CHAR_CASES = (
test_data('0ab3x5', (1, 3), 'x', modes.VISUAL, (1, 5), 'Forward'),
test_data('0a23x5', (1, 5), 'x', modes.VISUAL, (1, 5), 'Forward find b'),
test_data('0b2xa5', (5, 1), 'x', modes.VISUAL, (5, 3), 'Reverse no crossover'),
test_data('0ba3x5', (3, 1), 'x', modes.VISUAL, (2, 5), 'Reverse crossover'),
test_data('0b2x45', (4, 1), 'x', modes.VISUAL, (4, 3), 'Reverse find a'),
test_data('0x2a45', (4, 1), 'x', modes.VISUAL, (4, 1), 'Reverse find b'),
)
VISUAL_ONE_CHAR_CASES = (
test_data('ax', (0, 2), 'x', modes.VISUAL, (0, 2), 'Forward find b'),
test_data('bx', (2, 0), 'x', modes.VISUAL, (2, 1), 'Reverse find a'),
test_data('fx', (0, 1), 'x', modes.VISUAL, (0, 2), 'Forward find next'),
test_data('rx', (1, 0), 'x', modes.VISUAL, (0, 2), 'Reverse find next'),
test_data('f', (0, 1), 'f', modes.VISUAL, (0, 1), 'Forward find self'),
test_data('r', (1, 0), 'r', modes.VISUAL, (1, 0), 'Reverse find self'),
)
VISUAL_MULTI_MATCHES_CASES = (
test_data('0abxx5', (1, 3), 'x', modes.VISUAL, (1, 4), 'Forward find first'),
test_data('0axx45', (1, 3), 'x', modes.VISUAL, (1, 4), 'Forward find b'),
test_data('0bxx45', (3, 1), 'x', modes.VISUAL, (3, 2), 'Reverse find a'),
test_data('0bxx45', (4, 1), 'x', modes.VISUAL, (4, 2), 'Reverse find a'),
test_data('0xax45', (3, 1), 'x', modes.VISUAL, (2, 4), 'Reverse find b'),
)
VISUAL_MULTI_LINE_CASES = (
test_data('012\n456', (0, 5), '2', modes.VISUAL, (0, 5), 'Select L1->L2, find on L1'),
test_data('012\n456', (0, 5), '6', modes.VISUAL, (0, 7), 'Select L1->L2, find on L2'),
test_data('012\n456', (0, 4), '2', modes.VISUAL, (0, 4), 'Select L1->LF, find on L1'),
test_data('012\n456', (0, 4), '6', modes.VISUAL, (0, 4), 'Select L1->LF, find on L2'),
test_data('012\n456', (5, 0), '2', modes.VISUAL, (5, 2), 'Select L2->L1, find on L1'),
test_data('012\n456', (5, 0), '6', modes.VISUAL, (5, 0), 'Select L2->L1, find on L2'),
test_data('012\n456', (5, 3), '2', modes.VISUAL, (5, 3), 'Select L2->LF, find on L1'),
test_data('012\n456', (5, 3), '6', modes.VISUAL, (5, 3), 'Select L2->LF, find on L2'),
test_data('0123\n5678', (7, 5), '8', modes.VISUAL, (6, 9), 'Select L2->LF+1, find on L2'),
)
class Test_vi_f(ViewTest):
    def runTests(self, test_cases):
        for (i, data) in enumerate(test_cases):
self.write(data.text)
self.clear_sel()
self.add_sel(self.R(*data.startRegion))
self.view.run_command('_vi_find_in_line',
{'mode': data.mode, 'count': 1, 'char': data.findChar, 'inclusive': True})
self.assert_equal_regions(self.R(*data.expectedRegion), self.first_sel(),
"Failed on index {} {} : Text:\"{}\" Region:{} Find:'{}'"
.format(i, data.msg, data.text, data.startRegion, data.findChar))
def testNormalCases(self):
self.runTests(NORMAL_CASES)
def testInternalNormalCases(self):
self.runTests(INTERNAL_NORMAL_CASES)
def testVisualMultipleCharacterCases(self):
self.runTests(VISUAL_MULTI_CHAR_CASES)
def testVisualSingleCharacterCases(self):
self.runTests(VISUAL_ONE_CHAR_CASES)
def testVisualMultipleMatchesCases(self):
self.runTests(VISUAL_MULTI_MATCHES_CASES)
def testVisualMultipleLinesCases(self):
self.runTests(VISUAL_MULTI_LINE_CASES)
|
tests/pytests/unit/modules/test_zfs_solaris10.py
|
markgras/salt
| 9,425 |
138076
|
"""
Tests for salt.modules.zfs on Solaris
"""
import pytest
import salt.config
import salt.loader
import salt.modules.zfs as zfs
import salt.utils.zfs
from tests.support.mock import MagicMock, patch
from tests.support.zfs import ZFSMockData
@pytest.fixture
def utils_patch():
return ZFSMockData().get_patched_utils()
@pytest.fixture
def configure_loader_modules():
opts = salt.config.DEFAULT_MINION_OPTS.copy()
utils = salt.loader.utils(opts, whitelist=["zfs"])
zfs_obj = {
zfs: {
"__opts__": opts,
"__grains__": {
"osarch": "sparcv9",
"os_family": "Solaris",
"osmajorrelease": 10,
"kernel": "SunOS",
"kernelrelease": 5.10,
},
"__utils__": utils,
}
}
return zfs_obj
@pytest.mark.skip_unless_on_sunos(reason="test to ensure no -t only applies to Solaris")
def test_get_success_solaris():
"""
Tests zfs get success
"""
cmd_out = {
"pid": 7278,
"retcode": 0,
"stdout": "testpool\tmountpoint\t/testpool\tdefault",
"stderr": "",
}
run_all_mock = MagicMock(return_value=cmd_out)
patches = {
"cmd.run_all": run_all_mock,
}
with patch.dict(zfs.__salt__, patches):
with patch("sys.platform", MagicMock(return_value="sunos5")):
result = zfs.get("testpool", type="filesystem", properties="mountpoint")
assert result == {
"testpool": {
"mountpoint": {"value": "/testpool", "source": "default"},
},
}
run_all_mock.assert_called_once_with(
"/usr/sbin/zfs get -H -o name,property,value,source mountpoint testpool",
python_shell=False,
)
|
europilot/__version__.py
|
kc8055/europilot
| 1,069 |
138082
|
__title__ = 'europilot'
__description__ = 'End to end driving simulation inside Euro Truck Simulator 2'
__version__ = '0.0.1'
|
tests/test_odict.py
|
thomasrockhu-codecov/aws-cfn-template-flip
| 904 |
138091
|
"""
Copyright 2016-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
"""
from cfn_tools.odict import ODict
from copy import deepcopy
import pickle
import pytest
def test_get_set():
"""
It should at least work the same as a dict
"""
case = ODict()
case["one"] = 1
case["two"] = 2
assert len(case.keys()) == 2
assert case["one"] == 1
def test_list_constructor():
"""
We should be able to construct one from a tuple of pairs
"""
case = ODict((
("one", 1),
("two", 2),
))
assert len(case.keys()) == 2
assert case["one"] == 1
assert case["two"] == 2
assert case["two"] == 2
def test_ordering():
"""
Ordering should be left intact
"""
case = ODict()
case["z"] = 1
case["a"] = 2
assert list(case.keys()) == ["z", "a"]
def test_ordering_from_constructor():
"""
Ordering should be left intact
"""
case = ODict([
("z", 1),
("a", 2),
])
assert list(case.keys()) == ["z", "a"]
def test_constructor_disallows_dict():
"""
For the sake of python<3.6, don't accept dicts
as ordering will be lost
"""
with pytest.raises(Exception, match="ODict does not allow construction from a dict"):
ODict({
"z": 1,
"a": 2,
})
def test_explicit_sorting():
"""
Even an explicit sort should result in no change
"""
case = ODict((
("z", 1),
("a", 2),
)).items()
actual = sorted(case)
assert actual == case
def test_post_deepcopy_repr():
"""
Repr should behave normally after deepcopy
"""
dct = ODict([("a", 1)])
dct2 = deepcopy(dct)
assert repr(dct) == repr(dct2)
dct2["b"] = 2
assert repr(dct) != repr(dct2)
def test_pickle():
"""
Should be able to pickle and unpickle
"""
dct = ODict([
("c", 3),
("d", 4),
])
data = pickle.dumps(dct)
dct2 = pickle.loads(data)
assert dct == dct2
|
bin/api_connector_splunk/solnlib/packages/schematics/datastructures.py
|
CyberGRX/api-connector-splunk
| 106 |
138154
|
<filename>bin/api_connector_splunk/solnlib/packages/schematics/datastructures.py
# -*- coding: utf-8 -*-
# pylint: skip-file
from __future__ import unicode_literals, absolute_import
try:  # the ABCs moved to collections.abc in Python 3.3 and are gone from collections in 3.10+
    from collections.abc import MutableMapping, KeysView, ValuesView, ItemsView
except ImportError:  # Python 2
    from collections import MutableMapping, KeysView, ValuesView, ItemsView
from copy import deepcopy
from operator import eq
from .common import *
from .util import get_ident
class OrderedDict(MutableMapping, dict):
"""
An ordered dictionary.
The implementation is based on ``collections.OrderedDict`` of the standard library.
It preserves the original technique of storing the keys as a regular list, whereas
the reference implementation now uses a linked list. The built-in list gives better
performance in use cases that are typical with Schematics.
"""
def __init__(*args, **kwargs):
if not args:
raise TypeError("OrderedDict.__init__() needs an instance as the first argument")
self = args[0]
args = args[1:]
if len(args) > 1:
raise TypeError("OrderedDict() takes at most 1 positional argument, got %d" % len(args))
dict.__init__(self)
if not self:
self._keys = []
MutableMapping.update(self, *args, **kwargs)
__contains__ = dict.__contains__
__getitem__ = dict.__getitem__
__len__ = dict.__len__
get = dict.get
def __setitem__(self, key, item, setitem=dict.__setitem__):
if key not in self:
self._keys.append(key)
setitem(self, key, item)
def __delitem__(self, key, delitem=dict.__delitem__):
delitem(self, key)
self._keys.remove(key)
def __iter__(self):
return iter(self._keys)
def __reversed__(self):
return reversed(self._keys)
def clear(self):
del self._keys[:]
dict.clear(self)
def copy(self):
return self.__class__(self)
__copy__ = copy
def move_to_end(self, key, last=True):
if key not in self:
raise KeyError(key)
self._keys.remove(key)
if last:
self._keys.append(key)
else:
self._keys.insert(0, key)
__token = object()
def pop(self, key, default=__token):
if key in self:
self._keys.remove(key)
return dict.pop(self, key)
elif default is self.__token:
raise KeyError(key)
else:
return default
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
key = self._keys.pop(-1 if last else 0)
value = dict.pop(self, key)
return key, value
def setdefault(self, key, default=None):
if key in self:
return self[key]
else:
self[key] = default
return default
def sort(self, key=None, reverse=False):
if key is not None:
_key = lambda k: key((k, self[k]))
else:
_key = None
self._keys.sort(key=_key, reverse=reverse)
def reverse(self):
self._keys.reverse()
@classmethod
def fromkeys(cls, iterable, value=None):
return cls((key, value) for key in iterable)
def __eq__(self, other):
if isinstance(other, OrderedDict):
return dict.__eq__(self, other) and all(map(eq, self, other))
else:
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
def __reduce_ex__(self, protocol=0):
attrs = vars(self).copy()
for k in vars(self.__class__()):
attrs.pop(k, None)
if protocol <= 2:
# Express tuples as lists to enable proper PyYAML serialization.
items = [[k, self[k]] for k in self]
return (self.__class__, (items,), attrs or None)
else:
# Provide items as an iterator. This variant can handle recursive dictionaries.
return (self.__class__, (), attrs or None, None, iter(self.items()))
__reduce__ = __reduce_ex__
def __repr__(self, memo=set()):
call_key = (id(self), get_ident())
if call_key in memo:
return '...'
else:
memo.add(call_key)
try:
return '%s(%s)' % (self.__class__.__name__, repr(list(self.items())) if self else '')
finally:
memo.remove(call_key)
if PY3:
def keys(self):
return _ODKeysView(self)
def values(self):
return _ODValuesView(self)
def items(self):
return _ODItemsView(self)
class _ODKeysView(KeysView):
def __reversed__(self):
for key in reversed(self._mapping):
yield key
class _ODValuesView(ValuesView):
def __reversed__(self):
for key in reversed(self._mapping):
yield self._mapping[key]
class _ODItemsView(ItemsView):
def __reversed__(self):
for key in reversed(self._mapping):
yield (key, self._mapping[key])
class DataObject(object):
"""
An object for holding data as attributes.
``DataObject`` can be instantiated like ``dict``::
>>> d = DataObject({'one': 1, 'two': 2}, three=3)
>>> d.__dict__
{'one': 1, 'two': 2, 'three': 3}
Attributes are accessible via the regular dot notation (``d.x``) as well as
the subscription syntax (``d['x']``)::
>>> d.one == d['one'] == 1
True
To convert a ``DataObject`` into a dictionary, use ``d._to_dict()``.
``DataObject`` implements the following collection-like operations:
* iteration through attributes as name-value pairs
* ``'x' in d`` for membership tests
* ``len(d)`` to get the number of attributes
    Additionally, the following methods are equivalent to their ``dict`` counterparts:
    ``_clear``, ``_get``, ``_keys``, ``_items``, ``_pop``, ``_setdefault``, ``_update``.
    An advantage of ``DataObject`` over ``dict`` subclasses is that every method name
in ``DataObject`` begins with an underscore, so attributes like ``"update"`` or
``"values"`` are valid.
"""
def __init__(self, *args, **kwargs):
source = args[0] if args else {}
self._update(source, **kwargs)
def __repr__(self):
return self.__class__.__name__ + '(%s)' % repr(self.__dict__)
def _copy(self):
return self.__class__(self)
__copy__ = _copy
def __eq__(self, other):
return isinstance(other, DataObject) and self.__dict__ == other.__dict__
def __iter__(self):
return iter(self.__dict__.items())
def _update(self, source=None, **kwargs):
if isinstance(source, DataObject):
source = source.__dict__
self.__dict__.update(source, **kwargs)
def _setdefaults(self, source):
if isinstance(source, dict):
source = source.items()
for name, value in source:
self._setdefault(name, value)
return self
def _to_dict(self):
d = dict(self.__dict__)
for k, v in d.items():
if isinstance(v, DataObject):
d[k] = v._to_dict()
return d
def __setitem__(self, key, value): self.__dict__[key] = value
def __getitem__(self, key): return self.__dict__[key]
def __delitem__(self, key): del self.__dict__[key]
def __len__(self): return len(self.__dict__)
def __contains__(self, key): return key in self.__dict__
def _clear(self): return self.__dict__.clear()
def _get(self, *args): return self.__dict__.get(*args)
def _items(self): return self.__dict__.items()
def _keys(self): return self.__dict__.keys()
def _pop(self, *args): return self.__dict__.pop(*args)
def _setdefault(self, *args): return self.__dict__.setdefault(*args)
class Context(DataObject):
_fields = ()
def __init__(self, *args, **kwargs):
super(Context, self).__init__(*args, **kwargs)
if self._fields:
unknowns = [name for name in self._keys() if name not in self._fields]
if unknowns:
raise ValueError('Unexpected field names: %r' % unknowns)
@classmethod
def _new(cls, *args, **kwargs):
if len(args) > len(cls._fields):
raise TypeError('Too many positional arguments')
return cls(zip(cls._fields, args), **kwargs)
@classmethod
def _make(cls, obj):
if obj is None:
return cls()
elif isinstance(obj, cls):
return obj
else:
return cls(obj)
def __setattr__(self, name, value):
if name in self:
raise TypeError("Field '{0}' already set".format(name))
super(Context, self).__setattr__(name, value)
def _branch(self, **kwargs):
if not kwargs:
return self
items = dict(((k, v) for k, v in kwargs.items() if v is not None and v != self[k]))
if items:
return self.__class__(self, **items)
else:
return self
def _setdefaults(self, source):
if not isinstance(source, dict):
source = source.__dict__
new_values = source.copy()
new_values.update(self.__dict__)
self.__dict__.update(new_values)
return self
def __bool__(self):
return True
__nonzero__ = __bool__
__all__ = module_exports(__name__)
|
src/gonla/nlabin/nladbm/nladbm.py
|
RudSmith/beluganos
| 119 |
138181
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from nlaapi import nlaapi_pb2 as api
def dump_nodes(stub):
print("# Node")
cnt = 0
for node in stub.GetNodes(api.GetNodesRequest()):
cnt += 1
print(node)
print("# {0} nodes".format(cnt))
def dump_links(stub):
print("# Link")
cnt = 0
for link in stub.GetLinks(api.GetLinksRequest()):
cnt += 1
print(link)
print("# {0} links".format(cnt))
def dump_addrs(stub):
print("# Addr")
cnt = 0
for addr in stub.GetAddrs(api.GetAddrsRequest()):
cnt += 1
print(addr)
print("# {0} addrs".format(cnt))
def dump_neighs(stub):
print("# Neigh")
cnt = 0
for neigh in stub.GetNeighs(api.GetNeighsRequest()):
cnt += 1
print(neigh)
print("# {0} neighs".format(cnt))
def dump_routes(stub):
print("# Route")
cnt = 0
for route in stub.GetRoutes(api.GetRoutesRequest()):
cnt += 1
print(route)
print("# {0} routes".format(cnt))
def dump_mplss(stub):
print("# MPLS")
cnt = 0
for mpls in stub.GetMplss(api.GetMplssRequest()):
cnt += 1
print(mpls)
print("# {0} mplss".format(cnt))
def dump_vpns(stub):
print("# VPN")
cnt = 0
for vpn in stub.GetVpns(api.GetVpnsRequest()):
cnt += 1
print(vpn)
print("# {0} vpns".format(cnt))
def mon_netlink(stub):
for nlmsg in stub.MonNetlink(api.MonNetlinkRequest()):
print(nlmsg)
def dump_all(stub):
dump_nodes(stub)
dump_links(stub)
dump_addrs(stub)
dump_neighs(stub)
dump_routes(stub)
dump_mplss(stub)
dump_vpns(stub)
dump_cmd = dict(
node = dump_nodes,
link = dump_links,
addr = dump_addrs,
neigh= dump_neighs,
route= dump_routes,
mpls = dump_mplss,
vpn = dump_vpns,
all = dump_all,
)
def _getopts():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-a", "--addr", default="127.0.0.1:50052")
parser.add_argument("cmd")
parser.add_argument("table", nargs="?", default="all")
return parser.parse_args(), parser
def _help():
    _, p = _getopts()
p.print_help()
def _main():
opts, _ = _getopts()
channel = grpc.insecure_channel(opts.addr)
stub = api.NLAApiStub(channel)
if opts.cmd == "dump":
if opts.table not in dump_cmd:
print("{0} not found".format(opts.table))
return
dump_cmd[opts.table](stub)
elif opts.cmd == "mon":
mon_netlink(stub)
else:
_help()
if __name__ == "__main__":
_main()
|
demo/objsearch/objsearch.py
|
dgtlmoon/deepdetect
| 1,672 |
138221
|
import os, sys, argparse
from os import listdir
from os.path import isfile, join
from os import walk
from dd_client import DD
from annoy import AnnoyIndex
import shelve
import cv2
parser = argparse.ArgumentParser()
parser.add_argument("--index",help="repository of images to be indexed")
parser.add_argument("--index-batch-size",type=int,help="size of image batch when indexing",default=1)
parser.add_argument("--search",help="image input file for similarity search")
parser.add_argument("--search-size",help="number of nearest neighbors",type=int,default=10)
parser.add_argument("--confidence-threshold",help="confidence threshold on bounding boxes",type=float,default=0.01)
parser.add_argument("--nclasses",help="number of classes in the model",type=int,default=21)
parser.add_argument("--model-dir",help="model directory",default="model")
args = parser.parse_args()
def batch(iterable, n=1):
l = len(iterable)
for ndx in range(0, l, n):
yield iterable[ndx:min(ndx + n, l)]
def image_resize(imgfile,width):
imgquery = cv2.imread(imgfile)
r = width / imgquery.shape[1]
dim = (int(width), int(imgquery.shape[0] * r))
small = cv2.resize(imgquery,dim)
return small
host = 'localhost'
sname = 'imageserv'
description = 'image classification'
mllib = 'caffe'
mltype = 'supervised'
extract_layer = 'rois'
nclasses = args.nclasses
layer_size = 512 # auto anyways
width = height = 300
dd = DD(host)
dd.set_return_format(dd.RETURN_PYTHON)
ntrees = 1000
metric = 'angular' # or 'euclidean'
# creating ML service
model_repo = os.getcwd() + '/' + args.model_dir
model = {'repository':model_repo,'templates':'../templates/caffe/'}
parameters_input = {'connector':'image','width':width,'height':height}
parameters_mllib = {'nclasses':nclasses}
parameters_output = {}
try:
dd.put_service(sname,model,description,mllib,
parameters_input,parameters_mllib,parameters_output,mltype)
except:
pass
# reset call params
parameters_input = {}
parameters_mllib = {'gpu':True}
parameters_output = {'rois':'rois','confidence_threshold':args.confidence_threshold,'best':1}
if args.index:
try:
os.remove('data.bin')
except:
pass
s = shelve.open('data.bin')
# list files in image repository
c = 0
d = 1
onlyfiles = []
for (dirpath, dirnames, filenames) in walk(args.index):
nfilenames = []
for f in filenames:
nfilenames.append(dirpath + '/' + f)
onlyfiles.extend(nfilenames)
for x in batch(onlyfiles,args.index_batch_size):
classif = dd.post_predict(sname,x,parameters_input,parameters_mllib,parameters_output)
for p in classif['body']['predictions']:
uri = p['uri']
rois = p['rois']
sys.stdout.write('\rIndexing image '+str(d)+'/'+str(len(onlyfiles)) + ' : ' + str(len(rois)) + ' rois total:' + str(c) + ' ')
sys.stdout.flush()
for roi in rois:
bbox = roi['bbox']
cat = roi['cat']
prob = roi['prob']
vals = roi['vals']
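                    # The Annoy index is created lazily on the first ROI, once the
                    # descriptor length (layer_size) is known from the model output.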
if c == 0:
layer_size = len(vals)
s['layer_size'] = layer_size
t = AnnoyIndex(layer_size,metric) # prepare index
t.add_item(c,vals)
s[str(c)] = {'uri':uri, 'bbox' : bbox, 'cat' : cat, 'prob' : prob}
c = c + 1
d = d + 1
#if c >= 10000:
# break
    print('building index...\n')
    print('layer_size=', layer_size)
t.build(ntrees)
t.save('index.ann')
s.close()
if args.search:
s = shelve.open('data.bin')
u = AnnoyIndex(s['layer_size'],metric)
u.load('index.ann')
data = [args.search]
classif = dd.post_predict(sname,data,parameters_input,parameters_mllib,parameters_output)
# search for every roi
res = classif['body']['predictions'][0]['rois']
print('number of ROI in query: ' + str(len(res)))
for roi in res:
near = u.get_nns_by_vector(roi['vals'],args.search_size,include_distances=True)
near_data = []
near_distance = []
for n in near[1]:
near_distance.append(n)
print('distances: ')
print(near_distance)
for n in near[0]:
near_data.append(s[str(n)])
# print query bbox
img = cv2.imread(args.search)
bbox = roi['bbox']
cat = roi['cat']
cv2.rectangle(img, (int(bbox['xmin']),int(bbox['ymax'])),(int(bbox['xmax']),int(bbox['ymin'])),(255,0,0),2)
cv2.putText(img,cat,(int(bbox['xmin']),int(bbox['ymax'])),cv2.FONT_HERSHEY_PLAIN,1,255)
cv2.imshow('query',img)
cv2.waitKey(0)
for n in near_data:
resimg = cv2.imread(n['uri'])
bbox = n['bbox']
cat = n['cat']
cv2.rectangle(resimg, (int(bbox['xmin']),int(bbox['ymax'])),(int(bbox['xmax']),int(bbox['ymin'])),(255,0,0),2)
cv2.putText(resimg,cat,(int(bbox['xmin']),int(bbox['ymax'])),cv2.FONT_HERSHEY_PLAIN,1,255)
cv2.imshow('res',resimg)
cv2.waitKey(0)
dd.delete_service(sname,clear='')
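# Standalone sketch of the Annoy index/search pattern used above (added for
# illustration; the dimension, tree count, and file name are assumptions, and
# this helper is never called by the script itself).
def _annoy_sketch(vectors, query_vec, dim=512):
    from annoy import AnnoyIndex
    index = AnnoyIndex(dim, 'angular')
    for i, vec in enumerate(vectors):     # vectors: iterable of dim-length lists
        index.add_item(i, vec)
    index.build(1000)                     # more trees -> better recall, larger index
    index.save('sketch.ann')
    nn = AnnoyIndex(dim, 'angular')
    nn.load('sketch.ann')
    # ids of the 10 closest items plus their angular distances
    return nn.get_nns_by_vector(query_vec, 10, include_distances=True)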
|
psdaq/psdaq/pyxpm/surf/dsp/fixed/_FirFilterMultiChannel.py
|
ZhenghengLi/lcls2
| 134 |
138225
|
<gh_stars>100-1000
#-----------------------------------------------------------------------------
# This file is part of the 'SLAC Firmware Standard Library'. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the 'SLAC Firmware Standard Library', including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
import pyrogue as pr
class FirFilterMultiChannel(pr.Device):
def __init__(
self,
numberTaps = None, # TAP_SIZE_G
numberChannels = None, # CH_SIZE_G
dataWordBitSize = None, # WIDTH_G
**kwargs):
super().__init__(**kwargs)
if numberTaps is None:
raise ValueError( f'{self.path}: numberTaps is undefined' )
if numberChannels is None:
raise ValueError( f'{self.path}: numberChannels is undefined' )
if dataWordBitSize is None:
raise ValueError( f'{self.path}: dataWordBitSize is undefined' )
        def addBoolPair(ch,tap):
            # add the raw fixed-point register variable and a linked
            # floating-point view for this (channel, tap) coefficient
self.add(pr.RemoteVariable(
name = f'RawCh{ch}Tap[{tap}]',
description = f'Tap[{tap}] Fixed Point Coefficient',
offset = 0x0,
bitSize = dataWordBitSize,
bitOffset = (ch*numberTaps+tap)*dataWordBitSize,
base = pr.Int,
mode = 'RW',
hidden = True,
))
var = self.variables[ f'RawCh{ch}Tap[{tap}]' ]
self.add(pr.LinkVariable(
name = f'Ch{ch}Tap[{tap}]',
description = f'Tap[{tap}] Floating Point Coefficient',
mode = 'RW',
linkedGet = lambda: var.value()/2**dataWordBitSize,
linkedSet = lambda value, write: var.set(int(value*2**dataWordBitSize)),
dependencies = [var],
disp = '{:1.3f}',
))
for ch in range(numberChannels):
for tap in range(numberTaps):
addBoolPair(ch=ch,tap=tap)
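# Illustrative sketch (not part of the device class): the linked variables
# above scale tap coefficients by 2**dataWordBitSize.  A plain-Python version
# of that conversion, with an assumed 16-bit word size:
def _coeff_to_raw(value, dataWordBitSize=16):
    """Floating-point tap coefficient -> signed fixed-point register value."""
    return int(value * 2**dataWordBitSize)
def _raw_to_coeff(raw, dataWordBitSize=16):
    """Signed fixed-point register value -> floating-point tap coefficient."""
    return raw / 2**dataWordBitSize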
|
pyNastran/converters/nastran/test_nastran_gui.py
|
luzpaz/pyNastran
| 293 |
138274
|
"""tests the NastranIO class"""
import os
from copy import deepcopy
import unittest
import numpy as np
try:
import matplotlib
matplotlib.use('Agg')
IS_MATPLOTLIB = True
except ModuleNotFoundError: # pyparsing is missing
IS_MATPLOTLIB = False
#except ImportError:
#pass
import vtk
from cpylog import SimpleLogger
import pyNastran
from pyNastran.bdf.bdf import BDF
from pyNastran.bdf.cards.test.test_aero import get_zona_model
from pyNastran.bdf.errors import DuplicateIDsError
from pyNastran.gui.testing_methods import FakeGUIMethods
from pyNastran.converters.nastran.gui.nastran_io import NastranIO
from pyNastran.converters.nastran.nastran_to_vtk import nastran_to_vtk
RED = (1., 0., 0.)
class NastranGUI(NastranIO, FakeGUIMethods):
def __init__(self, inputs=None):
FakeGUIMethods.__init__(self, inputs=inputs)
NastranIO.__init__(self)
self.build_fmts(['nastran'], stop_on_failure=True)
PKG_PATH = pyNastran.__path__[0]
STL_PATH = os.path.join(PKG_PATH, 'converters', 'stl')
MODEL_PATH = os.path.join(PKG_PATH, '..', 'models')
class TestNastranGUI(unittest.TestCase):
def test_settings(self):
from qtpy import QtCore
settings = QtCore.QSettings()
test = NastranGUI()
is_loaded = test.settings.load(settings)
assert is_loaded is True
test.settings.save(settings, is_testing=True)
is_loaded = test.settings.load(settings)
assert is_loaded is True
test.settings.set_annotation_size_color(size=10, color=None)
#test.settings.set_annotation_size_color(size=10, color=RED)
test.settings.set_coord_scale(2.0, render=True)
test.settings.set_coord_text_scale(10, render=True)
test.settings.update_coord_scale(coord_scale=None, render=True)
test.settings.set_background_color_to_white(render=True)
color = RED
opacity = 0.4
test.settings.set_background_color(color, render=True)
test.settings.set_background_color2(color, render=True)
test.settings.set_highlight_color(color)
test.settings.set_highlight_opacity(opacity)
test.settings.set_text_color(color, render=True)
test.settings.set_text_size(10)
test.settings.set_magnify(magnify=4)
#self.settings.s
def test_solid_shell_bar_obj(self):
bdf_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'static_solid_shell_bar.bdf')
obj_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'static_solid_shell_bar.obj')
model = BDF()
model.read_bdf(bdf_filename)
model.save(obj_filename, unxref=True)
test = NastranGUI()
test.load_nastran_geometry(obj_filename)
@unittest.skipIf(IS_MATPLOTLIB is False, 'No matplotlib')
def test_solid_shell_bar_01(self):
bdf_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'static_solid_shell_bar.bdf')
op2_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'static_solid_shell_bar.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
test.cycle_results()
test.on_rcycle_results()
#print('test.result_cases', test.result_cases)
#gpforce = test.model.grid_point_forces[1]
icase_gpforce = None
for icase, (case, dummy) in test.result_cases.items():
if hasattr(case, 'gpforce_array'):
icase_gpforce = icase
break
else:
raise RuntimeError('missing gpforce')
case, (unused_i, unused_name) = test.result_cases[icase_gpforce]
str(case)
gpforce = case.gpforce_array
model_name = 'main'
p1 = [0., 0., 0.]
p3 = [1., 0., 0.]
p2 = [0., 1., 0.]
zaxis = [0., 0., 1.]
force_sum, moment_sum = test.shear_moment_torque_obj.plot_shear_moment_torque(
model_name, gpforce,
p1, p2, p3, zaxis,
method='Z-Axis Projection',
cid_p1=0, cid_p2=0, cid_p3=0, cid_zaxis=0,
nplanes=5, plane_color=None, plane_opacity=0.5,
csv_filename=None, show=False, stop_on_failure=True)
assert np.allclose(np.abs(force_sum).max(), 0.000732421875), np.abs(force_sum).max()
assert np.allclose(np.abs(moment_sum).max(), 0.000244140625), np.abs(moment_sum).max()
p1 = np.array([0., 0., 0.]) # origin
p2 = np.array([1., 0., 0.]) # xaxis
p3 = np.array([1., 0., 0.]) # end
zaxis = np.array([0., 0., 1.])
#idir = 0
test.shear_moment_torque_obj.plot_shear_moment_torque(
model_name, gpforce,
p1, p2, p3, zaxis,
method='Z-Axis Projection',
cid_p1=0, cid_p2=0, cid_p3=0, cid_zaxis=0,
nplanes=5, plane_color=None, plane_opacity=0.5,
csv_filename=None, show=False, stop_on_failure=True)
with self.assertRaises(TypeError):
# we need to set the case to a grid point force result
test.cutting_plane_obj.make_cutting_plane(
model_name,
p1, p2, zaxis,
method='Z-Axis Projection',
cid_p1=0, cid_p2=0, cid_zaxis=0,
ytol=1., plane_atol=1e-5,
plane_color=None, plane_opacity=0.5,
csv_filename=None, show=False, stop_on_failure=True)
# setting the case to a grid point force result
test.icase_fringe = icase_gpforce
test._cycle_results(icase_gpforce)
test.cutting_plane_obj.make_cutting_plane(
model_name,
p1, p2, zaxis,
method='Z-Axis Projection',
cid_p1=0, cid_p2=0, cid_zaxis=0,
ytol=1., plane_atol=1e-5,
plane_color=None, plane_opacity=0.5,
csv_filename=None, show=False, stop_on_failure=True)
test.icase_fringe = 0
#with self.assertRaises(RuntimeError):
test.cutting_plane_obj.make_cutting_plane(
model_name,
p1, p2, zaxis,
method='Z-Axis Projection',
cid_p1=0, cid_p2=0, cid_zaxis=0,
ytol=1., plane_atol=1e-5,
plane_color=None, plane_opacity=0.5,
csv_filename=None, show=False, stop_on_failure=True)
def test_solid_shell_bar_02(self):
bdf_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'mode_solid_shell_bar.bdf')
op2_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'mode_solid_shell_bar.op2')
test = NastranGUI()
test.legend_obj.set_legend_menu()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
assert len(test.models['main'].elements) > 0
test.on_rcycle_results()
test.on_update_legend(
title='Title', min_value=0., max_value=1.,
scale=0.0, phase=0.0,
arrow_scale=1.,
data_format='%.0f',
is_low_to_high=True, is_discrete=True, is_horizontal=True,
nlabels=None, labelsize=None, ncolors=None, colormap=None,
is_shown=True, render=True)
test.on_update_legend(
title='Title', min_value=0., max_value=1.,
scale=0.0, phase=0.0,
arrow_scale=1.,
data_format='%.0f',
is_low_to_high=True, is_discrete=True, is_horizontal=False,
nlabels=None, labelsize=None, ncolors=None, colormap='viridis',
is_shown=True, render=True)
test.legend_obj.set_legend_menu()
test.on_set_camera_data(
{'distance': 15.23729238729831,
'prallel_proj': None,
'view_angle': 30.0,
'parallel_scale': 3.9437014656284517,
'position': (-8.279127062822164, 4.306812025814127, 11.191236382055052),
'view_up': (0.14388395111701072, 0.9587296714789404, -0.245224031523912),
'clip_range': (7.44295814719721, 25.085506595796954),
'focal_point': (0.49249999999999994, 0.0, -0.5)}
)
test.settings.reset_settings()
test.on_set_font_size(8)
test.on_increase_font_size()
test.on_decrease_font_size()
labels_list = []
text = 'text'
x, y, z = 0., 0., 0.
labels_list.append(test.create_annotation(text, x, y, z))
cell_id = 1
world_position = [0., 0., 1.]
res_name, result_values, xyz = test.get_result_by_cell_id(
cell_id, world_position,
icase=0)
assert res_name == 'NodeID', 'res_name=%r' % res_name
assert result_values == 2, 'result_values=%r' % result_values
assert isinstance(xyz, list), xyz
#node_xyz = None
cell_id = 5
#out = test.mark_actions.get_result_by_xyz_cell_id(node_xyz, cell_id)
#result_name, result_values, node_id, xyz = out
eids = [1, 2]
icase_result = 2
icase_to_apply = 3
test.label_actors[2] = []
test.label_actors[3] = []
test.mark_actions.mark_elements_by_different_case(
eids, icase_result, icase_to_apply, stop_on_failure=True, )
#eids = [1, 2]
with self.assertRaises(NotImplementedError):
test.mark_actions.highlight_elements(eids, model_name='main')
nids = [1, 2]
icase = 1
test.label_actors[1] = []
text = 'cat'
test.mark_actions.mark_nodes(nids, icase, text)
with self.assertRaises(RuntimeError): # icase_to_apply=166 doesn't exist
test.mark_elements(eids, stop_on_failure=True, show_command=True)
#test.mark_elements_by_case(eids, stop_on_failure=True, show_command=True)
test.icase = 2 # PropertyID
test.mark_elements(eids, stop_on_failure=True, show_command=True)
test.mark_elements_by_case(eids, stop_on_failure=True, show_command=True)
#for key, obj in test.result_cases.items():
#print(key)
#print(obj)
# fail mapping strain energy because we're on NodeID
test.icase = 0 # NodeID
test.icase_fringe = 0 # NodeID
is_passed = test.map_element_centroid_to_node_fringe_result(update_limits=True, show_msg=True)
obj, (itime, name) = test.result_cases[test.icase]
str(obj)
assert is_passed == False, f'map_element_centroid_to_node_fringe_result should fail for NodeID\n{obj}'
# map strain energy
keys = list(test.result_cases.keys())
assert len(keys) == 689, len(keys)
icase = keys[-1]
obj, (itime, name) = test.result_cases[icase]
test.icase_fringe = icase
str(obj)
title = obj.get_title(itime, name)
assert title == 'Strain Energy Density', str(obj)
is_passed = test.map_element_centroid_to_node_fringe_result(update_limits=True, show_msg=False)
assert is_passed == True, 'map_element_centroid_to_node_fringe_result failed'
def test_solid_shell_bar_02b(self):
bdf_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'mode_solid_shell_bar.bdf')
test = NastranGUI()
test.on_load_geometry(infile_name=bdf_filename, geometry_format='nastran', name='main',
plot=True, raise_error=True)
def test_solid_shell_bar_03(self):
bdf_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'buckling_solid_shell_bar.bdf')
op2_filename = os.path.join(MODEL_PATH, 'sol_101_elements', 'buckling_solid_shell_bar.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_solid_bending(self):
bdf_filename = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending.bdf')
#op2_filename = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_ogs.op2')
deflection_filename1 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_multi_deflection_node.txt')
deflection_filename2 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_multi_deflection_node_short.txt')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
nresult_cases = len(test.result_cases)
icase = max(test.result_cases)
# these are the two cases we're checking were added
test.on_load_custom_results(out_filename=deflection_filename1, restype='Deflection')
test.on_load_custom_results(out_filename=deflection_filename1, restype='Force')
dresult_cases = len(test.result_cases) - nresult_cases
icase_final = max(test.result_cases)
dcase = icase_final - icase
assert dresult_cases == 2, dresult_cases
assert dcase == 2, dcase
assert (icase_final - 1) in test.label_actors
assert icase_final in test.label_actors
assert len(test.label_actors[icase_final]) == 0
nids = [1, 2, 3, 5]
icase = icase_final
text = 'word'
test.mark_nodes(nids, icase, text)
assert len(test.label_actors[icase_final]) == 4, len(test.label_actors[icase_final])
# test nodal results
#'node', 'element', 'deflection', 'force', 'patran_nod',
csv_filename1 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_multi_node.csv')
csv_filename2 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_multi_node_extra.txt')
csv_filename3 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending_multi_node_short.txt')
# missing/extra nodes
test.on_load_custom_results(out_filename=csv_filename1, restype='node', stop_on_failure=True)
test.on_load_custom_results(out_filename=csv_filename2, restype='node', stop_on_failure=True)
test.on_load_custom_results(out_filename=csv_filename3, restype='node', stop_on_failure=True)
# missing nodes
test.on_load_custom_results(out_filename=deflection_filename2, restype='Deflection')
def test_beam_modes_01(self):
"""CBAR/CBEAM - PARAM,POST,-1"""
bdf_filename = os.path.join(MODEL_PATH, 'beam_modes', 'beam_modes.dat')
op2_filename = os.path.join(MODEL_PATH, 'beam_modes', 'beam_modes_m1.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_beam_modes_02(self):
"""CBAR/CBEAM - PARAM,POST,-2"""
bdf_filename = os.path.join(MODEL_PATH, 'beam_modes', 'beam_modes.dat')
op2_filename = os.path.join(MODEL_PATH, 'beam_modes', 'beam_modes_m2.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_beam_modes_03(self):
dirname = os.path.join(MODEL_PATH, 'beam_modes')
bdf_filename = os.path.join(dirname, 'beam_modes.dat')
op2_filename = os.path.join(dirname, 'beam_modes_m1.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_beam_modes_04(self):
dirname = os.path.join(MODEL_PATH, 'beam_modes')
bdf_filename = os.path.join(dirname, 'beam_modes.dat')
op2_filename = os.path.join(dirname, 'beam_modes_m2.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
test.load_nastran_geometry(bdf_filename)
#@unittest.expectedFailure
#def test_contact(self):
#"""this test fails because of a misparsed card"""
#bdf_filename = os.path.join(MODEL_PATH, 'contact', 'contact.bdf')
#op2_filename = os.path.join(MODEL_PATH, 'contact', 'contact.op2')
#test = NastranGUI()
#test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
def test_fsi(self):
"""tests -1 coordinate systems (flag for a fluid contact face)"""
bdf_filename = os.path.join(MODEL_PATH, 'fsi', 'fsi.bdf')
op2_filename = os.path.join(MODEL_PATH, 'fsi', 'fsi.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_thermal_01(self):
"""runs models/thermal/thermal_test_153"""
dirname = os.path.join(MODEL_PATH, 'thermal')
bdf_filename = os.path.join(dirname, 'thermal_test_153.bdf')
op2_filename = os.path.join(dirname, 'thermal_test_153.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_bwb_gui(self):
bdf_filename = os.path.join(MODEL_PATH, 'bwb', 'bwb_saero.bdf')
test = NastranGUI()
#test.log = get_logger2()
test.load_nastran_geometry(bdf_filename)
test.group_actions.create_groups_by_property_id()
test.group_actions.create_groups_by_visible_result(nlimit=50)
test.toggle_conms()
def test_femap_rougv1_01(self):
"""tests the exhaust manifold and it's funny eigenvectors"""
dirname = os.path.join(MODEL_PATH, 'femap_exhaust')
#bdf_filename = os.path.join(dirname, 'modal_example.bdf')
op2_filename = os.path.join(dirname, 'modal_example.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_aero_op2(self):
"""tests the freedlm model (OP2 with aero)"""
#bdf_filename = os.path.join(MODEL_PATH, 'aero', 'freedlm', 'freedlm.bdf')
op2_filename = os.path.join(MODEL_PATH, 'aero', 'freedlm', 'freedlm.op2')
test = NastranGUI()
#test.log.level = 'debug'
#test.load_nastran_geometry(bdf_filename)
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
assert len(test.result_cases) == 218, len(test.result_cases)
#print(test.result_cases)
def test_aero(self):
"""tests the bah_plane"""
bdf_filename = os.path.join(MODEL_PATH, 'aero', 'bah_plane', 'bah_plane.bdf')
op2_filename = os.path.join(MODEL_PATH, 'aero', 'bah_plane', 'bah_plane.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
out_datai = deepcopy(test.geometry_properties)
test.on_update_geometry_properties_override_dialog(out_datai)
out_data = {
'clicked_ok' : True,
'Global XYZ' : out_datai['Global XYZ'],
'conm2' : out_datai['conm2'],
'bar_z' : out_datai['bar_z'],
'caero' : out_datai['caero'],
}
#print(test.geometry_properties)
coord = out_data['Global XYZ']
coord.is_visible = False
str(coord)
#print('coord = %r' % coord)
conm2 = out_data['conm2']
conm2.point_size = 10
barz = out_data['bar_z']
barz.bar_scale = 0.5
barz.is_visible = True
#print(barz)
caero = test.geometry_properties['caero']
str(caero)
caero.color = (255, 0, 0)
caero.line_width = 10
caero.opacity = 0.8
caero.is_visible = False
#print(caero)
#print(out_data)
test.on_update_geometry_properties(out_data, name='caero',
write_log=True)
def test_gui_elements_01(self):
"""tests forces/pressure in SOL 101"""
bdf_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
idisp = None
iforce_xyz = None
for key, case_data in test.result_cases.items():
case, data = case_data
#print(key, case)
if idisp is None and case.uname == 'Displacement':
idisp = key
elif idisp is not None and iforce_xyz is None and case.uname == 'LoadVectors':
iforce_xyz = key
break
elif key > 70:
break
ifringe = len(test.result_cases) - 1 # Strain Energy Density
test.on_fringe(icase=ifringe, stop_on_failure=True)
with self.assertRaises(ValueError):
test.on_vector(icase=ifringe, stop_on_failure=True)
with self.assertRaises(ValueError):
test.on_disp(icase=ifringe, stop_on_failure=True) # disp
test.on_fringe(icase=iforce_xyz, stop_on_failure=True)
test.on_vector(icase=iforce_xyz, stop_on_failure=True)
test.on_disp(icase=idisp, stop_on_failure=True) # disp
test.on_clear_results()
test.on_fringe(icase=iforce_xyz, stop_on_failure=True)
test.on_vector(icase=iforce_xyz, stop_on_failure=True) # force_xyz
test.on_disp(icase=idisp, stop_on_failure=True) # disp
test.on_fringe(icase=37, update_legend_window=True, show_msg=True) # normal
#op2_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.op2')
vtk_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.vtu')
nastran_to_vtk(op2_filename, vtk_filename)
assert os.path.exists(vtk_filename), vtk_filename
def test_gui_elements_01b(self):
bdf_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.op2')
#model = read_bdf(bdf_filename)
model = BDF()
model.disable_cards(['CHEXA', 'CTETRA', 'CPENTA',
'CROD', 'PLOTEL', 'CBAR', 'CBEAM', 'CTRIA3', 'CQUAD4', 'CQUADR', 'CTRIAR',
'CQUAD8', 'CTRIA6', 'CSHEAR', 'CTUBE',
'CONM2', 'CVISC', #'CONROD',
'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4', 'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4',
'PLOAD1', 'PLOAD2', 'PLOAD4',
])
model.read_bdf(bdf_filename)
#print(model.elements)
test2 = NastranGUI()
test2.load_nastran_geometry(model)
test2.load_nastran_results(op2_filename)
def test_gui_elements_02(self):
"""tests a large number of elements and results in SOL 101"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
#test = NastranGUI()
test.settings.nastran_create_coords = False
test.settings.nastran_is_bar_axes = False
test.settings.nastran_is_3d_bars = False
test.settings.nastran_is_3d_bars_update = False
test.settings.nastran_is_element_quality = False
test.settings.nastran_is_properties = False
test.load_nastran_geometry(op2_filename)
def test_gui_elements_03(self):
"""tests a large number of elements and results in SOL 103-modes"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'modes_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'modes_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
#test.create_groups_by_property_id()
test.create_groups_by_visible_result()
def test_gui_elements_04(self):
"""tests a large number of elements and results in SOL 108-freq"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'freq_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'freq_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
icase = 33
name = 'Normal'
subcase_id = -1
test.set_normal_result(icase, name, subcase_id)
test.setup_fake_text_actors()
icase = 0
icase2 = icase + 1
while icase2 < len(test.result_cases):
#test.on_cycle_results(case=icase2, show_msg=True)
unused_result_name = 'dummy'
test._set_case(unused_result_name, icase2, explicit=False, cycle=False,
skip_click_check=False, min_value=None, max_value=None,
is_legend_shown=None, show_msg=True)
icase2 += 1
def test_gui_elements_05(self):
"""tests a large number of elements and results in SOL 108-freq"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'freq_elements2.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'freq_elements2.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_elements_06(self):
"""tests a large number of elements and results in SOL 106-loadstep"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'loadstep_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'loadstep_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_elements_07(self):
"""tests a large number of elements and results in SOL 107-complex modes"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'modes_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'modes_complex_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_elements_08(self):
"""tests a large number of elements and results in SOL 109-linear time"""
bdf_filename = os.path.join(MODEL_PATH, 'elements', 'modes_elements.bdf')
op2_filename = os.path.join(MODEL_PATH, 'elements', 'time_elements.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_gui_pload_01(self):
"""tests a PLOAD4/CTETRA"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'ctetra.bdf')
op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'ctetra.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_pload_02(self):
"""tests a PLOAD4/CHEXA"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'chexa.bdf')
op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'chexa.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_pload_03(self):
"""tests a PLOAD4/CPENTA"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'cpenta.bdf')
op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'cpenta.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_pload_04(self):
"""tests a PLOAD4/CQUAD4"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'cquad4.bdf')
op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'cquad4.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_pload_05(self):
"""tests a PLOAD4/CTRIA3"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'ctria3.bdf')
op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'ctria3.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
#def test_gui_pload_06(self):
#"""tests a PLOAD1/CBAR"""
#bdf_filename = os.path.join(MODEL_PATH, 'elements', 'pload1.bdf')
#op2_filename = os.path.join(MODEL_PATH, 'unit', 'pload4', 'pload1.op2')
#test = NastranGUI()
#test.load_nastran_geometry(op2_filename)
#test.load_nastran_results(op2_filename)
#def test_gui_bar_rod(self):
#"""tests a PBARL/ROD"""
#bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_rod.bdf')
#test = NastranGUI()
#test.load_nastran_geometry(bdf_filename)
#def test_gui_bar_tube2(self):
def test_gui_bar_tube(self):
"""tests a PBARL/TUBE"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_tube.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_chan(self):
"""tests a PBARL/CHAN"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_chan.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.on_pan_left(None)
test.on_pan_right(None)
test.on_pan_up(None)
test.on_pan_down(None)
test.on_increase_magnification()
test.on_decrease_magnification()
test.zoom(1.2)
test.on_rotate_clockwise()
test.on_rotate_cclockwise()
test.rotate(15.0)
test.set_focal_point([0., 1., 2.])
test.export_case_data(icases=[0, 1])
test.update_camera('+x')
test.update_camera('-x')
test.update_camera('+y')
test.update_camera('-y')
test.update_camera('+z')
test.update_camera('-z')
test._update_camera()
camera_data = test.get_camera_data()
test.on_set_camera_data(camera_data, show_log=True)
csv_filename = os.path.join(MODEL_PATH, 'custom_geom.csv')
test.on_load_user_geom(csv_filename=csv_filename, name=None, color=None)
stl_filename = os.path.join(STL_PATH, 'sphere.stl')
test.on_load_user_geom(csv_filename=stl_filename, name=None, color=None)
test.clear_labels()
test.reset_labels()
with open('xyz1.csv', 'w') as xyz_file:
xyz_file.write('1., 2., 3.\n')
xyz_file.write('4., 5., 6.\n')
csv_filename = 'xyz1.csv' # os.path.join(MODEL_PATH, 'xyz1.csv')
test.on_load_csv_points(csv_filename=csv_filename, name=None, color=None)
os.remove(csv_filename)
with open('xyz2.csv', 'w') as xyz_file:
xyz_file.write('10., 20., 30.')
csv_filename = 'xyz2.csv' # os.path.join(MODEL_PATH, 'xyz2.csv')
test.on_load_csv_points(csv_filename=csv_filename, name=None, color=None)
os.remove(csv_filename)
#test.on_wireframe()
#test.on_surface()
os.remove('0_NodeID.csv')
os.remove('1_ElementID.csv')
with open('rotate.py', 'w') as pyfile:
pyfile.write('self.rotate(20.)\n')
test.on_run_script('rotate.py')
os.remove('rotate.py')
def test_gui_screenshot(self):
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_chan.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
magnify = None
render_large = vtk.vtkRenderLargeImage()
test.run_vtk = True
#test.create_corner_axis()
# faking coordinate system
axes_actor = vtk.vtkAxesActor()
test.corner_axis = vtk.vtkOrientationMarkerWidget()
test.corner_axis.SetOrientationMarker(axes_actor)
#test.on_take_screenshot(fname='chan.png', magnify=None, show_msg=True)
out = test.tool_actions._screenshot_setup(magnify, render_large)
line_widths0, point_sizes0, coord_scale0, coord_text_scale0, linewidth0, fake_axes_actor, magnify = out
test.tool_actions._screenshot_teardown(
line_widths0, point_sizes0, coord_scale0, coord_text_scale0, linewidth0, axes_actor)
def test_gui_bar_chan1(self):
"""tests a PBARL/CHAN1"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_chan1.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
#def test_gui_bar_chan2(self):
def test_gui_bar_bar(self):
"""tests a PBARL/BAR"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_bar.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_box(self):
"""tests a PBARL/BOX"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_box.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_z(self):
"""tests a PBARL/Z"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_z.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_t(self):
"""tests a PBARL/T"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_t.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_t1(self):
"""tests a PBARL/T1"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_t1.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
str(test.geometry_properties)
T1z = deepcopy(test.geometry_properties['T1_z'])
T1z.line_width = 4
T1z.color = (255, 0, 0)
T1z.opacity = 0.6
T1z.bar_scale = 0.20
test.edit_geometry_properties_obj.on_update_geometry_properties({'T1_z' : T1z}, name=None, write_log=True)
test.edit_geometry_properties_obj.on_update_geometry_properties({'T1_z' : T1z}, name='T1_z', write_log=True)
def test_gui_bar_t2(self):
"""tests a PBARL/T2"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_t2.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_hexa(self):
"""tests a PBARL/HEXA"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_hexa.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_hat(self):
"""tests a PBARL/HAT"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_hat.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_i(self):
"""tests a PBARL/I"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_i.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_i1(self):
"""tests a PBARL/I1"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_i1.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_bar_h(self):
"""tests a PBARL/H"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbarl_h.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_beam_l(self):
"""tests a PBEAML/L"""
bdf_filename = os.path.join(MODEL_PATH, 'unit', 'bars', 'pbeaml_l.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
total_length = test.model.get_length_breakdown()[100]
assert np.allclose(total_length, 100.)
def test_gui_thermal_01(self):
"""tests thermal"""
#bdf_filename = os.path.join(MODEL_PATH, 'thermal', 'thermal_test_153.bdf')
op2_filename = os.path.join(MODEL_PATH, 'thermal', 'thermal_test_153.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_thermal_02(self):
"""tests thermal"""
bdf_filename = os.path.join(MODEL_PATH, 'thermal', 'hd15901.bdf')
op2_filename = os.path.join(MODEL_PATH, 'thermal', 'hd15901.op2')
test = NastranGUI()
with self.assertRaises(DuplicateIDsError):
test.load_nastran_geometry(op2_filename)
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_gui_thermal_03(self):
"""tests thermal"""
#bdf_filename = os.path.join(MODEL_PATH, 'other', 'hd15306.bdf')
op2_filename = os.path.join(MODEL_PATH, 'other', 'hd15306.op2')
test = NastranGUI()
test.load_nastran_geometry(op2_filename)
test.load_nastran_results(op2_filename)
def test_gui_superelement_1(self):
"""tests flyswatter"""
bdf_filename = os.path.join(MODEL_PATH, 'superelements', 'flyswatter', 'flyswatter_renumber.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
def test_gui_superelement_2(self):
"""tests superelement mirror/shift/renumber"""
bdf_filename = os.path.join(MODEL_PATH, 'superelements', 'see103q4.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
os.remove('spike.bdf')
os.remove('super_12.bdf')
os.remove('super_13.bdf')
os.remove('super_15.bdf')
def test_gui_dvprel(self):
"""tests dvprel"""
bdf_filename = os.path.join(MODEL_PATH, 'other', 'dofm12.bdf')
#op2_filename = os.path.join(MODEL_PATH, 'other', 'dofm12.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
#test.load_nastran_results(op2_filename)
def test_gui_optimization_mcpads4(self):
"""tests mcpads4.bdf, which tests *.des and convergence"""
bdf_filename = os.path.join(MODEL_PATH, 'other', 'mcpads4.bdf')
op2_filename = os.path.join(MODEL_PATH, 'other', 'mcpads4.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_gui_patran(self):
"""tests patran format"""
bdf_filename = os.path.join(MODEL_PATH, 'patran_fmt', '0012_20.bdf')
nod_filename = os.path.join(MODEL_PATH, 'patran_fmt', 'normals.nod')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(nod_filename)
def test_gui_patran2(self):
"""tests patran format"""
bdf_filename = os.path.join(MODEL_PATH, 'patran_fmt', '0012_20.bdf')
nod_filename = os.path.join(MODEL_PATH, 'patran_fmt', 'normals.nod')
test = NastranGUI()
test.on_load_geometry(bdf_filename, geometry_format='nastran', raise_error=True)
test.on_load_custom_results(out_filename=nod_filename, restype='Patran_nod')
def test_gui_axi(self):
"""tests axisymmetric elements"""
bdf_filename = os.path.join(MODEL_PATH, 'axisymmetric', 'model.bdf')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
def test_gui_ogs(self):
"""
tests ogs.op2:
- GridPointSurfaceStressesArray
"""
bdf_filename = os.path.join(MODEL_PATH, 'ogs', 'ogs.bdf')
op2_filename = os.path.join(MODEL_PATH, 'ogs', 'ogs.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_gui_bdf_op2_other_23(self):
"""
tests ehbus69.op2:
- RealBush1DStressArray
- GridPointStressesVolumeDirectArray
- GridPointStressesVolumePrincipalArray
- GridPointStressesSurfaceDiscontinutiesArray
"""
# TODO: support these results...
bdf_filename = os.path.join(MODEL_PATH, 'other', 'ehbus69.bdf')
op2_filename = os.path.join(MODEL_PATH, 'other', 'ehbus69.op2')
test = NastranGUI()
test.load_nastran_geometry(bdf_filename)
test.load_nastran_results(op2_filename)
def test_gui_zona_model_1(self):
bdf_filename = os.path.join(MODEL_PATH, 'aero', 'f16_ma41.bdf')
test = NastranGUI()
test.log = SimpleLogger(level='error', encoding='utf-8', log_func=None)
test.load_nastran_geometry(bdf_filename)
def test_gui_zona_model_2(self):
bdf_file = get_zona_model()
test = NastranGUI()
test.log = SimpleLogger(level='error', encoding='utf-8', log_func=None)
test.load_nastran_geometry(bdf_file)
#def test_bottle(): # pragma: no cover
#"""
#Tests Nastran GUI loading
#"""
#test = NastranGUI()
#test.load_nastran_geometry('bottle_shell_w_holes_pmc.bdf', '')
#test.load_nastran_results('bottle_shell_w_holes_pmc.op2', '')
#keys = test.result_cases.keys()
#assert (1, 'Stress1', 1, 'centroid', '%.3f') in keys, keys
if __name__ == '__main__': # pragma: no cover
unittest.main()
|
pptutils.py
|
TomWildenhain/pptcc
| 448 |
138284
|
<reponame>TomWildenhain/pptcc<filename>pptutils.py
def hex_to_int(hex):
assert hex.startswith('0x')
hex = hex[2:]
total = 0
for h in hex:
total *= 16
total += '0123456789abcdef'.index(h)
return total
def byte_to_uint(byte):
total = 0
for c in byte:
total *= 2
if c == '1':
total += 1
return total
def byte_to_int(byte):
total = 0
for c in byte:
total *= 2
if c == '1':
total += 1
return total if byte[0] == '0' else total - 2**8
def word_to_int(word):
total = 0
for c in word:
total *= 2
if c == '1':
total += 1
return total if word[0] == '0' else total - 2**16
def dword_to_int(dword):
total = 0
for c in dword:
total *= 2
if c == '1':
total += 1
return total if dword[0] == '0' else total - 2**32
def word_to_uint(word):
total = 0
for c in word:
total *= 2
if c == '1':
total += 1
return total
def dword_to_uint(dword):
total = 0
for c in dword:
total *= 2
if c == '1':
total += 1
return total
def int_to_byte(x):
if x < 0:
x += 2**8
res = ''
for i in range(8):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res
def int_to_word(x):
if x < 0:
x += 2**16
res = ''
for i in range(16):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res
def uint_to_word(x):
res = ''
for i in range(16):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res
def uint_to_dword(x):
res = ''
for i in range(32):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res[:16], res[16:]
def int_to_dword(x):
if x < 0:
x += 2**32
res = ''
for i in range(32):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res[:16], res[16:]
def uint_to_byte(x):
res = ''
for i in range(8):
if x % 2 == 1:
res = '1' + res
else:
res = '0' + res
x = x // 2
return res
def split_on_spaces(s):
parts = s.replace('\t', ' ').split(' ')
parts = [p.strip() for p in parts if p.strip()]
return parts
def condense_spaces(s):
return ' '.join(split_on_spaces(s))
def pad_to_length(s, l):
assert l >= len(s)
return s + ' ' * (l - len(s))
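# Quick self-check sketch (added for illustration; the values are arbitrary):
# exercises round trips between the integer and bit-string helpers above.
if __name__ == '__main__':
    assert hex_to_int('0xff') == 255
    assert byte_to_uint(uint_to_byte(200)) == 200
    assert byte_to_int(int_to_byte(-5)) == -5
    assert word_to_int(int_to_word(-1234)) == -1234
    assert word_to_uint(uint_to_word(40000)) == 40000
    assert condense_spaces('a \t b   c') == 'a b c'
    assert pad_to_length('ab', 5) == 'ab   '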
|
insights/parsers/tests/test_ip_netns_exec_namespace_lsof.py
|
lhuett/insights-core
| 121 |
138294
|
from insights.parsers import SkipException
from insights.parsers import ip_netns_exec_namespace_lsof
from insights.parsers.ip_netns_exec_namespace_lsof import IpNetnsExecNamespaceLsofI
from insights.tests import context_wrap
import doctest
import pytest
IP_NETNS_EXEC_NAMESPACE_LSOF_I = """
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
neutron-n 975 root 5u IPv4 6482691 0t0 TCP *:http (LISTEN)
""".strip()
EXCEPTION1 = """
""".strip()
EXCEPTION2 = """
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
""".strip()
def test_ip_netns_exec_namespace_lsof():
data = IpNetnsExecNamespaceLsofI(context_wrap(IP_NETNS_EXEC_NAMESPACE_LSOF_I))
assert len(data.search(node="TCP")) == 1
assert len(data.search(command="neutron-n")) == 1
assert len(data.search(user="nobody")) == 0
assert data.data[0]["command"] == "neutron-n"
assert data.data[0].get("node") == "TCP"
assert [ps[2] for ps in data] == ["root"]
def test_ip_netns_exec_namespace_lsof_documentation():
env = {
"ns_lsof": IpNetnsExecNamespaceLsofI(context_wrap(IP_NETNS_EXEC_NAMESPACE_LSOF_I)),
}
failed, total = doctest.testmod(ip_netns_exec_namespace_lsof, globs=env)
assert failed == 0
def test_ip_netns_exec_namespace_lsof_exception1():
with pytest.raises(SkipException) as e:
IpNetnsExecNamespaceLsofI(context_wrap(EXCEPTION1))
assert "Empty file" in str(e)
def test_ip_netns_exec_namespace_lsof_exception2():
with pytest.raises(SkipException) as e:
IpNetnsExecNamespaceLsofI(context_wrap(EXCEPTION2))
assert "Useless data" in str(e)
|
build/apple/xcrun.py
|
zealoussnow/chromium
| 2,219 |
138308
|
<gh_stars>1000+
#!/usr/bin/python3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Wrapper around xcrun adding support for --developer-dir parameter to set
the DEVELOPER_DIR environment variable, and for converting paths relative
to absolute (since this is required by most of the tools run via xcrun).
"""
import argparse
import os
import subprocess
import sys
def xcrun(command, developer_dir):
environ = dict(os.environ)
if developer_dir:
environ['DEVELOPER_DIR'] = os.path.abspath(developer_dir)
processed_args = ['/usr/bin/xcrun']
for arg in command:
if os.path.exists(arg):
arg = os.path.abspath(arg)
processed_args.append(arg)
process = subprocess.Popen(processed_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
env=environ)
stdout, stderr = process.communicate()
sys.stdout.write(stdout)
if process.returncode:
sys.stderr.write(stderr)
sys.exit(process.returncode)
def main(args):
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
'--developer-dir',
help='path to developer dir to use for the invocation of xcrun')
parsed, remaining_args = parser.parse_known_args(args)
xcrun(remaining_args, parsed.developer_dir)
if __name__ == '__main__':
main(sys.argv[1:])
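# Example invocation (hypothetical developer dir and tool, for illustration):
#   python3 xcrun.py --developer-dir /Applications/Xcode.app/Contents/Developer \
#       actool --version
# Any argument that names an existing path is rewritten to an absolute path
# before being handed to /usr/bin/xcrun.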
|
tests/test_session.py
|
jmsking/pykube
| 116 |
138330
|
<gh_stars>100-1000
"""
pykube.http unittests
"""
import copy
import logging
import tempfile
from pathlib import Path
from . import TestCase
BASE_CONFIG = {
"clusters": [
{"name": "test-cluster", "cluster": {"server": "http://localhost:8080"}}
],
"contexts": [
{
"name": "test-cluster",
"context": {"cluster": "test-cluster", "user": "test-user"},
}
],
"users": [{"name": "test-user", "user": {}}],
"current-context": "test-cluster",
}
_log = logging.getLogger(__name__)
class TestSession(TestCase):
def setUp(self):
self.config = copy.deepcopy(BASE_CONFIG)
def test_build_session_auth_provider(self):
"""Test that HTTPClient correctly parses the auth-provider config.
Observed in GKE with kubelet v1.3.
"""
self.config.update(
{
"users": [
{
"name": "test-user",
"user": {
"auth-provider": {
"config": {
"access-token": "<PASSWORD>",
"expiry": "2016-08-24T16:19:17.19878675-07:00",
},
},
},
},
]
}
)
gcloud_content = """
{
"client_id": "myclientid",
"client_secret": "myclientsecret",
"refresh_token": "<PASSWORD>",
"type": "authorized_user"
}
"""
_log.info("Built config: %s", self.config)
try:
tmp = Path(tempfile.mktemp())
with tmp.open("w") as f:
f.write(gcloud_content)
# TODO: this no longer works due to refactoring, GCP session handling is now done in KubernetesHTTPAdapter
# session = pykube.session.GCPSession(pykube.KubeConfig(doc=self.config), tmp)
# self.assertEquals(session.oauth.token['access_token'], 'abc')
# self.assertEquals(session.oauth.token['refresh_token'], 'myrefreshtoken')
# self.assertEquals(session.credentials.get('client_id'), 'myclientid')
# self.assertEquals(session.credentials.get('client_secret'), 'myclientsecret')
finally:
if tmp.exists():
tmp.unlink()
|
test/rpc-tests/dao/when/iSubmitAPaymentRequest.py
|
chasingkirkjufell/navcoin-core
| 103 |
138346
|
<gh_stars>100-1000
#!/usr/bin/env python3
# Copyright (c) 2019 The Navcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Expanded helper routines for regression testing of the NAV Coin community fund
#
from test_framework.util import *
def whenISubmitAPaymentRequest(node=None,
hash=None,
amount=None,
description=None,
expectSuccess=True):
if (node is None
or hash is None
or amount is None
or description is None
or (expectSuccess != True and expectSuccess != False)):
print('whenISubmitAPaymentRequest: invalid parameters')
assert(False)
try:
hash = node.createpaymentrequest(hash, amount, description)["hash"]
except JSONRPCException as e:
if (expectSuccess == True):
print(e.error)
assert(False)
assert(e.error["code"] == -3)
assert(e.error["message"] == "Proposal has not been accepted.")
return
slow_gen(node, 1)
paymentRequest = node.getpaymentrequest(hash)
assert(paymentRequest["hash"] == hash)
assert(paymentRequest["blockHash"] == node.getbestblockhash())
assert(satoshi_round(paymentRequest["requestedAmount"]) == satoshi_round(amount))
assert(paymentRequest["description"] == description)
assert(paymentRequest["status"] == 'pending')
assert(paymentRequest["state"] == 0)
assert(paymentRequest["votingCycle"] == 0)
assert(paymentRequest["votesNo"] == 0)
assert(paymentRequest["votesYes"] == 0)
return hash
|
oatomobile/tf/types.py
|
jannikwagner/oatomobile
| 152 |
138348
|
<filename>oatomobile/tf/types.py
# Copyright 2020 The OATomobile Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Type definitions used in baselines."""
from typing import Any
from typing import Iterable
from typing import Mapping
from typing import Union
import numpy as np
import tensorflow as tf
from oatomobile import types
Shape = types.Shape
Tensor = tf.Tensor
Array = Union[types.Scalar, np.ndarray, Tensor]
NestedArray = Union[Array, Iterable["NestedArray"], Mapping[Any, "NestedArray"]]
NestedTensor = Union[Tensor, Iterable["NestedTensor"], Mapping[Any,
"NestedTensor"]]
|
examples/acrobot_a2c.py
|
PuzeLiu/mushroom-rl
| 344 |
138354
|
<filename>examples/acrobot_a2c.py
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from mushroom_rl.algorithms.actor_critic import A2C
from mushroom_rl.core import Core, Logger
from mushroom_rl.environments import Gym
from mushroom_rl.policy import BoltzmannTorchPolicy
from mushroom_rl.approximators.parametric.torch_approximator import *
from mushroom_rl.utils.dataset import compute_J
from mushroom_rl.utils.parameters import Parameter
from tqdm import trange
class Network(nn.Module):
def __init__(self, input_shape, output_shape, n_features, **kwargs):
super(Network, self).__init__()
n_input = input_shape[-1]
n_output = output_shape[0]
self._h1 = nn.Linear(n_input, n_features)
self._h2 = nn.Linear(n_features, n_features)
self._h3 = nn.Linear(n_features, n_output)
nn.init.xavier_uniform_(self._h1.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h2.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h3.weight,
gain=nn.init.calculate_gain('linear'))
def forward(self, state, **kwargs):
features1 = torch.relu(self._h1(torch.squeeze(state, 1).float()))
features2 = torch.relu(self._h2(features1))
a = self._h3(features2)
return a
def experiment(n_epochs, n_steps, n_steps_per_fit, n_step_test):
np.random.seed()
logger = Logger(A2C.__name__, results_dir=None)
logger.strong_line()
logger.info('Experiment Algorithm: ' + A2C.__name__)
# MDP
horizon = 1000
gamma = 0.99
gamma_eval = 1.
mdp = Gym('Acrobot-v1', horizon, gamma)
# Policy
policy_params = dict(
n_features=32,
use_cuda=False
)
beta = Parameter(1e0)
pi = BoltzmannTorchPolicy(Network,
mdp.info.observation_space.shape,
(mdp.info.action_space.n,),
beta=beta,
**policy_params)
# Agent
critic_params = dict(network=Network,
optimizer={'class': optim.RMSprop,
'params': {'lr': 1e-3,
'eps': 1e-5}},
loss=F.mse_loss,
n_features=32,
batch_size=64,
input_shape=mdp.info.observation_space.shape,
output_shape=(1,))
alg_params = dict(actor_optimizer={'class': optim.RMSprop,
'params': {'lr': 1e-3,
'eps': 3e-3}},
critic_params=critic_params,
ent_coeff=0.01
)
agent = A2C(mdp.info, pi, **alg_params)
# Algorithm
core = Core(agent, mdp)
core.learn(n_steps=n_steps, n_steps_per_fit=n_steps_per_fit)
# RUN
dataset = core.evaluate(n_steps=n_step_test, render=False)
J = compute_J(dataset, gamma_eval)
logger.epoch_info(0, J=np.mean(J))
for n in trange(n_epochs):
core.learn(n_steps=n_steps, n_steps_per_fit=n_steps_per_fit)
dataset = core.evaluate(n_steps=n_step_test, render=False)
J = compute_J(dataset, gamma_eval)
logger.epoch_info(n+1, J=np.mean(J))
logger.info('Press a button to visualize acrobot')
input()
core.evaluate(n_episodes=5, render=True)
if __name__ == '__main__':
experiment(n_epochs=40, n_steps=1000, n_steps_per_fit=5, n_step_test=2000)
|
ulmo/ncdc/cirs/__init__.py
|
sblack-usu/ulmo
| 123 |
138378
|
<reponame>sblack-usu/ulmo
"""
`National Climatic Data Center`_ `Climate Index Reference Sequential
(CIRS)`_ drought dataset
.. _National Climatic Data Center: http://www.ncdc.noaa.gov
.. _Climate Index Reference Sequential (CIRS): http://www1.ncdc.noaa.gov/pub/data/cirs/
"""
from .core import get_data
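# Hedged usage sketch (keyword names are assumptions; the actual signature
# lives in .core): fetch a drought index, optionally as a pandas DataFrame.
#   import ulmo
#   pdsi = ulmo.ncdc.cirs.get_data(elements='pdsi', as_dataframe=True)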
|