from pathlib import Path
from tempfile import NamedTemporaryFile
import numpy as np
import pandas as pd
import pytest
from etna.datasets import generate_ar_df
@pytest.fixture
def base_pipeline_yaml_path():
tmp = NamedTemporaryFile("w")
tmp.write(
"""
_target_: etna.pipeline.Pipeline
horizon: 4
model:
_target_: etna.models.CatBoostModelMultiSegment
transforms:
- _target_: etna.transforms.LinearTrendTransform
in_column: target
- _target_: etna.transforms.SegmentEncoderTransform
"""
)
tmp.flush()
yield Path(tmp.name)
tmp.close()
@pytest.fixture
def base_pipeline_omegaconf_path():
tmp = NamedTemporaryFile("w")
tmp.write(
"""
_target_: etna.pipeline.Pipeline
horizon: 4
model:
_target_: etna.models.CatBoostModelMultiSegment
transforms:
- _target_: etna.transforms.LinearTrendTransform
in_column: target
- _target_: etna.transforms.SegmentEncoderTransform
- _target_: etna.transforms.LagTransform
in_column: target
lags: "${shift:${horizon},[1, 2, 4]}"
"""
)
tmp.flush()
yield Path(tmp.name)
tmp.close()
@pytest.fixture
def base_timeseries_path():
df = generate_ar_df(periods=100, start_time="2021-06-01", n_segments=2)
tmp = NamedTemporaryFile("w")
df.to_csv(tmp, index=False)
tmp.flush()
yield Path(tmp.name)
tmp.close()
@pytest.fixture
def base_timeseries_exog_path():
df_regressors = pd.DataFrame(
{
"timestamp": list(pd.date_range("2021-06-01", periods=120)) * 2,
"regressor_1": np.arange(240),
"regressor_2": np.arange(240) + 5,
"segment": ["segment_0"] * 120 + ["segment_1"] * 120,
}
)
tmp = NamedTemporaryFile("w")
df_regressors.to_csv(tmp, index=False)
tmp.flush()
yield Path(tmp.name)
tmp.close()
@pytest.fixture
def base_forecast_omegaconf_path():
tmp = NamedTemporaryFile("w")
tmp.write(
"""
prediction_interval: true
quantiles: [0.025, 0.975]
n_folds: 3
"""
)
tmp.flush()
yield Path(tmp.name)
tmp.close()
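# Illustrative test (not part of the original suite) showing how a fixture
# above might be consumed; assumes omegaconf is installed.
def test_base_pipeline_yaml_is_loadable(base_pipeline_yaml_path):
    from omegaconf import OmegaConf
    config = OmegaConf.load(base_pipeline_yaml_path)
    assert config["_target_"] == "etna.pipeline.Pipeline"
    assert config["horizon"] == 4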
# ==== next file: python ====
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='message.proto',
package='com.akolar.maxaltitude',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\rmessage.proto\x12\x16\x63om.akolar.maxaltitude\"\xed\x07\n\x0e\x41ircraftBeacon\x12\x14\n\x0cmessage_from\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65stto\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\t\x12\x10\n\x08latitude\x18\x04 \x01(\x01\x12\x11\n\tlongitude\x18\x05 \x01(\x01\x12\x10\n\x08\x61ltitude\x18\x16 \x01(\x02\x12\x10\n\x08receiver\x18\x06 \x01(\t\x12\x0b\n\x03uid\x18\x07 \x01(\t\x12\x0f\n\x07stealth\x18\x08 \x01(\x08\x12\x14\n\x0c\x64o_not_track\x18\t \x01(\x08\x12\x13\n\x0braw_message\x18\x13 \x01(\t\x12\x0f\n\x07relayer\x18\n \x01(\t\x12\x0f\n\x07heading\x18\x0b \x01(\r\x12\x14\n\x0cground_speed\x18\x0c \x01(\x02\x12\x16\n\x0evertical_speed\x18\r \x01(\x02\x12\x11\n\tturn_rate\x18\x0e \x01(\x02\x12\x1d\n\x15signal_to_noise_ratio\x18\x0f \x01(\x02\x12\x13\n\x0b\x65rror_count\x18\x10 \x01(\r\x12\x13\n\x0b\x66req_offset\x18\x11 \x01(\x02\x12J\n\raircraft_type\x18\x14 \x01(\x0e\x32\x33.com.akolar.maxaltitude.AircraftBeacon.AircraftType\x12H\n\x0c\x61\x64\x64ress_type\x18\x15 \x01(\x0e\x32\x32.com.akolar.maxaltitude.AircraftBeacon.AddressType\x12\x43\n\x0bgps_quality\x18\x12 \x01(\x0b\x32..com.akolar.maxaltitude.AircraftBeacon.Quality\x1a/\n\x07Quality\x12\x12\n\nhorizontal\x18\x01 \x01(\r\x12\x10\n\x08vertical\x18\x02 \x01(\r\"\x80\x02\n\x0c\x41ircraftType\x12\x14\n\x10UNKNOWN_AIRPLANE\x10\x00\x12\n\n\x06GLIDER\x10\x01\x12\r\n\tTOW_PLANE\x10\x02\x12\x19\n\x15HELICOPTER_ROTORCRAFT\x10\x03\x12\r\n\tPARACHUTE\x10\x04\x12\x0e\n\nDROP_PLANE\x10\x05\x12\x0f\n\x0bHANG_GLIDER\x10\x06\x12\x0e\n\nPARAGLIDER\x10\x07\x12\x14\n\x10POWERED_AIRCRAFT\x10\x08\x12\x10\n\x0cJET_AIRCRAFT\x10\t\x12\x07\n\x03UFO\x10\n\x12\n\n\x06\x42\x41LOON\x10\x0b\x12\x0b\n\x07\x41IRSHIP\x10\x0c\x12\x07\n\x03UAV\x10\r\x12\x11\n\rSTATIC_OBJECT\x10\x0f\"U\n\x0b\x41\x64\x64ressType\x12\x13\n\x0fUNKNOWN_ADDRESS\x10\x00\x12\x08\n\x04ICAO\x10\x01\x12\t\n\x05\x46LARM\x10\x02\x12\x0f\n\x0bOGN_TRACKER\x10\x03\x12\x0b\n\x07NAVITER\x10\x04\x62\x06proto3')
)
_AIRCRAFTBEACON_AIRCRAFTTYPE = _descriptor.EnumDescriptor(
name='AircraftType',
full_name='com.akolar.maxaltitude.AircraftBeacon.AircraftType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_AIRPLANE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GLIDER', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TOW_PLANE', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HELICOPTER_ROTORCRAFT', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PARACHUTE', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DROP_PLANE', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HANG_GLIDER', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PARAGLIDER', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POWERED_AIRCRAFT', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='JET_AIRCRAFT', index=9, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UFO', index=10, number=10,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BALOON', index=11, number=11,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AIRSHIP', index=12, number=12,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UAV', index=13, number=13,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STATIC_OBJECT', index=14, number=15,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=704,
serialized_end=960,
)
_sym_db.RegisterEnumDescriptor(_AIRCRAFTBEACON_AIRCRAFTTYPE)
_AIRCRAFTBEACON_ADDRESSTYPE = _descriptor.EnumDescriptor(
name='AddressType',
full_name='com.akolar.maxaltitude.AircraftBeacon.AddressType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_ADDRESS', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ICAO', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FLARM', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='OGN_TRACKER', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NAVITER', index=4, number=4,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=962,
serialized_end=1047,
)
_sym_db.RegisterEnumDescriptor(_AIRCRAFTBEACON_ADDRESSTYPE)
_AIRCRAFTBEACON_QUALITY = _descriptor.Descriptor(
name='Quality',
full_name='com.akolar.maxaltitude.AircraftBeacon.Quality',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='horizontal', full_name='com.akolar.maxaltitude.AircraftBeacon.Quality.horizontal', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='vertical', full_name='com.akolar.maxaltitude.AircraftBeacon.Quality.vertical', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=654,
serialized_end=701,
)
_AIRCRAFTBEACON = _descriptor.Descriptor(
name='AircraftBeacon',
full_name='com.akolar.maxaltitude.AircraftBeacon',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='message_from', full_name='com.akolar.maxaltitude.AircraftBeacon.message_from', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='destto', full_name='com.akolar.maxaltitude.AircraftBeacon.destto', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='com.akolar.maxaltitude.AircraftBeacon.timestamp', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='latitude', full_name='com.akolar.maxaltitude.AircraftBeacon.latitude', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='longitude', full_name='com.akolar.maxaltitude.AircraftBeacon.longitude', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='altitude', full_name='com.akolar.maxaltitude.AircraftBeacon.altitude', index=5,
number=22, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='receiver', full_name='com.akolar.maxaltitude.AircraftBeacon.receiver', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uid', full_name='com.akolar.maxaltitude.AircraftBeacon.uid', index=7,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stealth', full_name='com.akolar.maxaltitude.AircraftBeacon.stealth', index=8,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='do_not_track', full_name='com.akolar.maxaltitude.AircraftBeacon.do_not_track', index=9,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='raw_message', full_name='com.akolar.maxaltitude.AircraftBeacon.raw_message', index=10,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='relayer', full_name='com.akolar.maxaltitude.AircraftBeacon.relayer', index=11,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='heading', full_name='com.akolar.maxaltitude.AircraftBeacon.heading', index=12,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ground_speed', full_name='com.akolar.maxaltitude.AircraftBeacon.ground_speed', index=13,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='vertical_speed', full_name='com.akolar.maxaltitude.AircraftBeacon.vertical_speed', index=14,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='turn_rate', full_name='com.akolar.maxaltitude.AircraftBeacon.turn_rate', index=15,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='signal_to_noise_ratio', full_name='com.akolar.maxaltitude.AircraftBeacon.signal_to_noise_ratio', index=16,
number=15, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error_count', full_name='com.akolar.maxaltitude.AircraftBeacon.error_count', index=17,
number=16, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freq_offset', full_name='com.akolar.maxaltitude.AircraftBeacon.freq_offset', index=18,
number=17, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='aircraft_type', full_name='com.akolar.maxaltitude.AircraftBeacon.aircraft_type', index=19,
number=20, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='address_type', full_name='com.akolar.maxaltitude.AircraftBeacon.address_type', index=20,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gps_quality', full_name='com.akolar.maxaltitude.AircraftBeacon.gps_quality', index=21,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_AIRCRAFTBEACON_QUALITY, ],
enum_types=[
_AIRCRAFTBEACON_AIRCRAFTTYPE,
_AIRCRAFTBEACON_ADDRESSTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=42,
serialized_end=1047,
)
_AIRCRAFTBEACON_QUALITY.containing_type = _AIRCRAFTBEACON
_AIRCRAFTBEACON.fields_by_name['aircraft_type'].enum_type = _AIRCRAFTBEACON_AIRCRAFTTYPE
_AIRCRAFTBEACON.fields_by_name['address_type'].enum_type = _AIRCRAFTBEACON_ADDRESSTYPE
_AIRCRAFTBEACON.fields_by_name['gps_quality'].message_type = _AIRCRAFTBEACON_QUALITY
_AIRCRAFTBEACON_AIRCRAFTTYPE.containing_type = _AIRCRAFTBEACON
_AIRCRAFTBEACON_ADDRESSTYPE.containing_type = _AIRCRAFTBEACON
DESCRIPTOR.message_types_by_name['AircraftBeacon'] = _AIRCRAFTBEACON
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AircraftBeacon = _reflection.GeneratedProtocolMessageType('AircraftBeacon', (_message.Message,), dict(
Quality = _reflection.GeneratedProtocolMessageType('Quality', (_message.Message,), dict(
DESCRIPTOR = _AIRCRAFTBEACON_QUALITY,
__module__ = 'message_pb2'
# @@protoc_insertion_point(class_scope:com.akolar.maxaltitude.AircraftBeacon.Quality)
))
,
DESCRIPTOR = _AIRCRAFTBEACON,
__module__ = 'message_pb2'
# @@protoc_insertion_point(class_scope:com.akolar.maxaltitude.AircraftBeacon)
))
_sym_db.RegisterMessage(AircraftBeacon)
_sym_db.RegisterMessage(AircraftBeacon.Quality)
# @@protoc_insertion_point(module_scope)
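# Hedged usage sketch for the generated message class above (requires a
# protobuf runtime compatible with this legacy generated code; the values
# below are illustrative):
#   beacon = AircraftBeacon()
#   beacon.latitude = 46.05
#   beacon.aircraft_type = AircraftBeacon.GLIDER
#   payload = beacon.SerializeToString()
#   restored = AircraftBeacon.FromString(payload)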
# ==== next file: python ====
##########################################################################
# MediPy - Copyright (C) Universite de Strasbourg
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
def enum(name, *members, **named):
""" Create an enumerated type called ``name`` with given ``members``: ::
>>> Colors = medipy.base.enum("Colors", "red", "green", "blue")
>>> color = Colors.red
By default, the members are integers, but their value can be specified
    using named parameters. In this case, the values must be hashable, since
    they are used as the keys of a dictionary: ::
>>> Colors = medipy.base.enum("Colors", "red", green="green", blue=3.14)
>>> Colors.red
0
>>> Colors.green
'green'
>>> Colors.blue
3.14
The name of members can be retrieved either in bulk or using their
values: ::
>>> Colors.members
['red', 'green', 'blue']
>>> Colors[0]
'red'
>>> Colors["green"]
'green'
>>> Colors[3.14]
'blue'
    The enumerated type is an instance of :class:`~medipy.base.Enum`: ::
>>> isinstance(Colors, medipy.base.Enum)
True
"""
enums = dict(zip(members, range(len(members))), **named)
    reverse = dict((value, key) for key, value in enums.items())
new_type = Enum(name, (), enums)
new_type._reverse = reverse
return new_type
class Enum(type):
""" Base class for all enumerated types. This class should not be used
directly
"""
def __init__(cls, name, bases, dct):
type.__init__(cls, name, bases, dct)
    @property
    def members(self):
        return list(self._reverse.values())
    def __getitem__(self, item):
""" Return the name of a member given its value.
"""
return self._reverse[item]
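# Quick self-check mirroring the docstring examples above (illustrative):
if __name__ == "__main__":
    Colors = enum("Colors", "red", "green", "blue")
    assert Colors.red == 0
    assert Colors[0] == "red"
    assert Colors.members == ["red", "green", "blue"]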
# ==== next file: python ====
# # # # # # # # # # # # #
# #
# #
# #
# Pi Radio #
# By #
# Jackson #
# Hoggard #
# (c)2018 #
# #
# #
# #
# # # # # # # # # # # # #
import os, argparse, random
import keyboard  # used by checkForQuit() to poll for the quit key
from colorama import Fore, Back, Style
from multiprocessing import Process
parser = argparse.ArgumentParser(prog='python PiRadio.py', description='Broadcasts WAV/MP3 file over FM using RPI GPIO #4 pin.')
parser.add_argument("-s", "--song_file", help="Set song to play")
parser.add_argument("-f", "--frequency", help="Set TX frequency. Acceptable range 87.1-108.2", type=float)
arg = parser.parse_args()
playlist = []
frequency = '99.9'
def start():
    global frequency  # play() reads this, so it must not stay local to start()
    os.system("clear")
    print("Starting Pi Radio")
    if arg.frequency is None:
        frequency = '99.9'  # default frequency
    elif not (87.1 <= arg.frequency <= 108.2):  # reject out-of-range frequencies
        print("Frequency is out of range.")
        exit()
    else:
        frequency = str(arg.frequency)
    print("\nFrequency set to " + frequency)
    makePlaylist()
    os.system("clear")
    begin()
    print("Songs in Playlist:\n" + Fore.GREEN + "______________________________\n")
    for song in playlist:
        print(Style.RESET_ALL + song)
    print(Fore.GREEN + "______________________________")
    print(Fore.WHITE + "Type Choice Number:\n1. Shuffle Play\n2. Talk\n3. Exit\n\n\n")
    userInput()
def begin():
    print(Fore.RED + Back.WHITE + '# PiRadio Station v1.1 #')
    print(Style.RESET_ALL)
def play(song):
    print(Style.RESET_ALL + "\n")
    arg.song_file = song
    try:
        if ".mp3" in arg.song_file.lower():
            os.system("ffmpeg -i " + arg.song_file + " -f s16le -ar 22.05k -ac 1 - | sudo ./fm_transmitter -f " + frequency + " -")
        elif ".wav" in arg.song_file.lower():
            os.system("sudo ./fm_transmitter -f " + frequency + " /home/pi/fm_transmitter/music/" + arg.song_file)
        else:
            print("That file extension is not supported.")
            print("File name provided: %s" % arg.song_file)
            raise IOError
    except IOError:
        print("There was an error regarding file selection. Halting.")
        exit()
    except Exception:
        print("Something went wrong. Halting.")
        exit()
def makePlaylist():
    for root, dirs, files in os.walk("/home/pi/fm_transmitter/music"):
        for file in files:
            if file.endswith(".wav"):
                playlist.append(file)
def playSongs():
    print(Style.RESET_ALL + "\n")
    while True:
        i = random.randint(0, len(playlist) - 1)
        print(Fore.RED + Back.WHITE + "Now Playing: " + playlist[i] + "\n")
        print(Style.RESET_ALL)
        # Hand play and its argument to the child process instead of calling
        # it in place.
        p1 = Process(target=play, args=(playlist[i],))
        p1.start()
        if checkForQuit(p1):
            break
def talk():
    print("Still testing. Please choose a different option")
    userInput()
def userInput():
    choice = input(" > ")
    processInput(choice)
def processInput(c):
    # input() returns a string, so choices are compared as strings; elif
    # keeps the fallback from firing after a valid choice.
    if c == '1':
        playSongs()
    elif c == '2':
        talk()
    elif c == '3':
        exit()
    else:
        userInput()
def checkForQuit(p1):
    # Poll for 'q' while the playback process runs and terminate it on request.
    while p1.is_alive():
        if keyboard.is_pressed('q'):
            p1.terminate()
            start()
            return True
    return False
if __name__ == "__main__":
    start()
# ==== next file: python ====
import struct, base64
import numpy as np
mdmr_dtypes = { 0: 's', 1: 'I', 2: 'q', 3: 'f', 4: 'd'}
# output of test_encode
b64data = """TURNUjAwMDECAAAABAAAAKUAAAAAAAAABAAAADcAAAAAAAAAAAAAAAAAAAB47j1cctzBv2t9kdCWc8G/fSJPkq6ZwL/PSe8bX3u+v6TC2EKQg7q/6iEa3UHstL8YQznRrkKqv4NuL2mM1oG/GEM50a5Cqj96Nqs+V1vBPxR5knTN5M8/yol2FVJ+2D+hSs0eaAXgPzaTb7a5MeI/+5EiMqzi4j9YkGYsms7iP4WUn1T7dOI/Arfu5qkO4j+OQLyuX7DhP8dLN4lBYOE/5pZWQ+Ie4T8IyQImcOvgP065wrtcxOA/K6T8pNqn4D9OtKuQ8pPgP/fHe9XKhOA/ba0vEtpy4D9cOBCSBUzgP/TDCOHRxt8/8S4X8Z2Y3T/MYmLzcW3YP4j029eBc84/IHu9++O9sj9XW7G/7J6sv/SmIhXGFsK/CoUIOIQqyb+6g9iZQufNv+o+AKlNnNC/38Mlx53S0b/njCjtDb7Sv9mZQuc1dtO/YDyDhv4J1L8XnwJgPIPUv7lTOlj/59S/eCgK9Ik81b9sBOJ1/YLVv3lA2ZQrvNW/2CrB4nDm1b/M0eP3Nv3Vv6DDfHkB9tW/YFlpUgq61b9+Oh4zUBnVv+XVOQZkr9O/AAAAAAAAAAAAAAAAAAAAABcrajANw+6/xty1hHzQ7r8IyQImcOvuv/lmmxvTE++/fm/Tn/1I778OT6+UZYjvv88sCVBTy++/AAAAAAAA8L+XcymuKvvvvyvB4nDmV++/28TJ/Q5F7b+fWRKgppbov5CDEmba/uC/OWItPgXA0L/e5SK+E7Oev0ZfQZqxaMI/SOF6FK5H0T9VwaikTkDXPxPyQc9m1ds//Yf029eB3z9eaK7TSEvhP76HS447peI/3xrYKsHi4z/BqKROQBPlP4e/JmvUQ+Y/csRafAqA5z/jUwCMZ9DoP6OvIM1YNOo/7ncoCvSJ6z/ye5v+7EfsP49TdCSX/+o/FjCBW3fz5T/rxVBOtKvcP2Hgufdwyc0/V5V9VwT/sz+NeohGdxCbv9Sa5h2n6Li/NXugFRiywr+HM7+aAwTHv0ku/yH99sm//vFetTLhy7/a4a/JGvXMv482jliLT82/ecxAZfz7zL9GzsKedvjLv/a0w1+TNcq/NJ2dDI6Sx7+bVZ+rrdjDv+KS407pYL2/HQOy17s/rr+pwTQMHxGTP/0wQni0ccA/QkP/BBcr0j8AAAAAAAAAAAAAAAAAAAAAN2xblNkgz78R34lZL4bOv9vEyf0ORc2/+GuyRj1Ey7+ZDTLJyFnIv3E486s5QMS/YJM16iEavb/ZQpCDEmaqvw3DR8SUSKI/Cty6m6c6xD9rgqj7AKTUP8cpOpLLf+A/qkNuhhvw5T+l2qfjMQPpP61M+KV+3uk/Ksb5m1CI6T/RBfUtc7roPw7bFmU2yOc/5bM8D+7O5j8vaYzWUdXlP2JnCp3X2OQ/io7k8h/S4z+7fsFu2LbiP9UJaCJseOE/UtUEUfcB4D+CrRIsDmfcP4YgByXMtNc/MlpHVRNE0T+CHJQw0/a/P5BJRs7Cnra/+Um1T8dj2L825QrvchHmv8eA7PXuj+y/zH9Iv30d778dPX5v05/vv2LWi6GcaO+/xuHMr+YA77/V52or9pfuvz86deWzPO6/bcX+snvy7b9FEr2MYrntv1WkwthCkO2/S3ZsBOJ17b9+HThnRGntv7bbLjTXae2/u/JZngd37b+OklfnGJDtv6TH7236s+2/HcnlP6Tf7b+XytsRTgvuvxY1mIbhI+6/CoDxDBr67b+V8e8zLhztvwAAAAAAAAAAAAAAAAAAAAAXK2owDcPuv8bctYR80O6/CMkCJnDr7r/5Zpsb0xPvv35v05/9SO+/Dk+vlGWI77/PLAlQU8vvvwAAAAAAAPC/l3Mprir7778rweJw5lfvv9vEyf0ORe2/n1kSoKaW6L+QgxJm2v7gvzliLT4FwNC/3uUivhOznr9GX0GasWjCP0jhehSuR9E/VcGopE5A1z8T8kHPZtXbP/2H9NvXgd8/Xmiu00hL4T++h0uOO6XiP98a2CrB4uM/waikTkAT5T+HvyZr1EPmP3LEWnwKgOc/41MAjGfQ6D+jryDNWDTqP+53KAr0ies/8nub/uxH7D+PU3Qkl//qPxYwgVt38+U/68VQTrSr3D9h4Ln3cMnNP1eVfVcE/7M/jXqIRncQm7/UmuYdp+i4vzV7oBUYssK/hzO/mgMEx79JLv8h/fbJv/7xXrUy4cu/2uGvyRr1zL+PNo5Yi0/Nv3nMQGX8+8y/Rs7Cnnb4y7/2tMNfkzXKvzSdnQyOkse/m1Wfq63Yw7/ikuNO6WC9vx0Dste7P66/qcE0DB8Rkz/9MEJ4tHHAP0JD/wQXK9I/AAAAAAAAAAA="""
strdata = base64.b64decode(b64data)
def parse(data):
    ptr = 0
    hdr_st = '=' + ''.join(('4s', '4s', 'I'))
    hdr_size = struct.calcsize(hdr_st)
    magic, version, n_blocks = struct.unpack(hdr_st, data[:hdr_size])
    ptr += hdr_size
    blocks = []
    block_fmt = '=iq'
    block_size = struct.calcsize(block_fmt)
    for block_i in range(n_blocks):
        dtype, length = struct.unpack(block_fmt, data[ptr:ptr+block_size])
        blocks.append((dtype, length))
        ptr += block_size  # advance past this block-header entry
    # data blocks start right after the block-header table, so ptr is now
    # positioned at the first payload
    output_data = []
    for block_i, blockinfo in enumerate(blocks):
        dtype, length = blockinfo
        dtype_fmt = mdmr_dtypes[dtype]
        fmt = '{}{}'.format(length, dtype_fmt)
        block_size = struct.calcsize(fmt)
        block_data = struct.unpack(fmt, data[ptr:ptr+block_size])
        output_data.append(np.array(block_data))
        ptr += block_size  # advance past this block's payload
    return (magic, version, n_blocks, output_data)
if __name__ == '__main__':
(magic, version, n_blocks, data) = parse(strdata)
print("magic: ", magic)
print("version: ", version)
print("n_blocks: ", n_blocks)
print("data (arrays): ")
for i,array in enumerate(data):
print("### array number:", i)
print(array)
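# Hedged inverse of parse() for round-trip testing, assuming the layout read
# above (4-byte magic, 4-byte version, uint32 block count, a table of
# (int32 dtype, int64 length) entries, then the packed payloads). Numeric
# dtypes only; a 's' block would pack a single bytes value instead.
def encode(blocks, magic=b'MDMR', version=b'0001'):
    header = struct.pack('=4s4sI', magic, version, len(blocks))
    table = b''.join(struct.pack('=iq', dtype, len(values))
                     for dtype, values in blocks)
    body = b''.join(
        struct.pack('={}{}'.format(len(values), mdmr_dtypes[dtype]), *values)
        for dtype, values in blocks)
    return header + table + body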
# ==== next file: python ====
from app import app
from config import MONGO_URI, client
from flask import session, request, jsonify
import pymongo
import requests
import json
from datetime import datetime
# Connect to MongoDB
db = client['aiboxdb']
@app.route('/api/android/getRemind', methods=['GET'])
def android_get_remind():
    '''Get reminders that have no user_nickname (not logged in), filtering out expired entries.
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': reminders for users who are not logged in (empty list if none); or the error message
            'msg': status message
        }
    '''
try:
remind_collect = db['reminder']
user_remind_doc = remind_collect.find({'user_nickname': ''})
except Exception as err:
resp = {
'status': '404',
            'result': str(err),  # stringify the exception so it is JSON serializable
'msg': '取得未登入提醒資料失敗'
}
return jsonify(resp)
result_list = []
for item in user_remind_doc:
if datetime.strptime(item['remind_time'], '%Y-%m-%d %H:%M:%S') > datetime.today():
obj = {
'remind_time': item['remind_time'],
'dosomething': item['dosomething']
}
result_list.append(obj)
resp = {
'status': '200',
'result': result_list,
'msg': '取得未登入提醒資料成功'
}
return jsonify(resp)
@app.route('/api/android/getAllLocation', methods=['GET'])
def android_get_all_location():
    '''Get all queried locations.
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': all queried locations (empty list if none); or the error message
            'msg': status message
        }
    '''
try:
location_collect = db['location']
location_doc = location_collect.find().sort("_id", 1)
except Exception as err:
resp = {
'status': '404',
            'result': str(err),  # stringify the exception so it is JSON serializable
'msg': '取得所有查詢地點失敗'
}
return jsonify(resp)
result_list = []
for item in location_doc:
obj = {
'location': item['location'],
'region': item['region'],
'number': str(item['number']),
'unit': item['unit'],
'date': item['date']
}
result_list.append(obj)
resp = {
'status': '200',
'result': result_list,
'msg': '取得所有查詢地點成功'
}
return jsonify(resp)
@app.route('/api/android/getLastLocation', methods=['GET'])
def android_get_last_location():
    '''Get the last (most recent) queried location.
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': the most recent queried location (empty object {} if none); or the error message
            'msg': status message
        }
    '''
try:
location_collect = db['location']
        location_doc = location_collect.find().sort("_id", -1).limit(1) # take the largest _id
except Exception as err:
resp = {
'status': '404',
            'result': str(err),  # stringify the exception so it is JSON serializable
'msg': '取得最新的查詢地點失敗'
}
return jsonify(resp)
    obj = {}
for item in location_doc:
obj = {
'location': item['location'],
'region': item['region'],
'number': str(item['number']),
'unit': item['unit'],
'date': item['date']
}
resp = {
'status': '200',
'result': obj,
'msg': '取得最後一個(最新)查詢地點成功'
}
return jsonify(resp)
@app.route('/api/android/getWeather', methods=['GET'])
def android_get_weather():
    '''Get the weather conditions for a given city.
    Params:
        city: city name
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': the city's weather conditions; or the error message
            'msg': status message
        }
    '''
city = request.args.get('city')
print(city)
has_city = False
city_transfer = {
'新北': '新北市',
'新北市': '新北市',
'台北': '臺北市',
'台北市': '臺北市',
'台中': '臺中市',
'台中市': '臺中市',
'台南': '臺南市',
'台南市': '臺南市'
}
    city = city_transfer.get(city, city)  # normalize the city name variant
    weather = {
        'Wx': '',    # weather phenomenon
        'MaxT': '',  # maximum temperature
        'MinT': '',  # minimum temperature
        'CI': '',    # comfort index
        'PoP': '',   # probability of precipitation
        'info': ''   # lets the app pick the matching background
    }
    # Government open-data weather API
resp = requests.get('https://opendata.cwb.gov.tw/api/v1/rest/datastore/F-C0032-001?Authorization=rdec-key-123-45678-011121314')
data = json.loads(resp.text)
    records = data['records']['location'] # forecast records for each region
for record in records:
if record['locationName'] == city:
has_city = True
elements = record['weatherElement']
            for element in elements:
                if element['elementName'] == 'Wx':    # weather phenomenon
                    weather['Wx'] = element['time'][-1]['parameter']['parameterName']
                if element['elementName'] == 'MaxT':  # maximum temperature
                    weather['MaxT'] = element['time'][-1]['parameter']['parameterName']
                if element['elementName'] == 'MinT':  # minimum temperature
                    weather['MinT'] = element['time'][-1]['parameter']['parameterName']
                if element['elementName'] == 'CI':    # comfort index
                    weather['CI'] = element['time'][-1]['parameter']['parameterName']
                if element['elementName'] == 'PoP':   # probability of precipitation
                    weather['PoP'] = element['time'][-1]['parameter']['parameterName']
    # weather background info for the app; match against the forecast text
    if '雨' in weather['Wx']:
        weather['info'] = 'rainy'
    elif '晴' in weather['Wx'] or '熱' in weather['Wx']:
        weather['info'] = 'sunny'
    elif '雲' in weather['Wx']:
        weather['info'] = 'cloudy'
    else:
        weather['info'] = 'cloudy'
if has_city is True:
resp = {
'status': '200',
'result': weather,
'msg': '取得某城市的天氣狀況成功'
}
return jsonify(resp)
else:
resp = {
'status': '404',
'result': '沒有此城市',
'msg': '取得某城市的天氣狀況失敗'
}
return jsonify(resp)
@app.route('/api/android/getHospital', methods=['GET'])
def android_get_hospital():
    '''Get information about a hospital.
    Params:
        hospital: hospital name
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': the hospital information; or the error message
            'msg': status message
        }
    '''
hospital = request.args.get('hospital')
db = client['aiboxdb']
hospital_collect = db['hospital']
hospital_doc = hospital_collect.find_one({'機構名稱': {'$regex': hospital}})
    if hospital_doc is not None:
hospital_doc.pop('_id')
resp = {
'status': '200',
'result': hospital_doc,
'msg': '取得醫院資訊成功'
}
return jsonify(resp)
else:
resp = {
'status': '404',
'result': '沒有此醫院',
'msg': '取得醫院資訊失敗'
}
return jsonify(resp)
@app.route('/api/android/getECPhone', methods=['GET'])
def android_get_ec_phone():
    '''Get the emergency contact phone number.
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': the emergency contact phone number; or the error message
            'msg': status message
        }
    '''
db = client['aiboxdb']
temp_ec_phone_collect = db['temp_ec_phone']
temp_ec_phone_doc = temp_ec_phone_collect.find_one({'_id': 0})
if temp_ec_phone_doc['phone'] != '':
resp = {
'status': '200',
'result': {
'phone': temp_ec_phone_doc['phone']
},
'msg': '取得緊急聯絡電話成功'
}
temp_ec_phone_doc = temp_ec_phone_collect.find_one_and_update({'_id': 0}, {'$set': {'phone': ''}}, upsert=False)
return jsonify(resp)
else:
resp = {
'status': '404',
'result': "null",
'msg': '取得緊急聯絡電話失敗'
}
return jsonify(resp)
@app.route('/api/android/getActivity', methods=['GET'])
def android_get_activity():
    '''Get activity information.
    Returns:
        {
            'status': '200' -> success; '404' -> failure
            'result': the activity information; or the error message
            'msg': status message
        }
    '''
db = client['aiboxdb']
open_activity_collect = db['open_activity']
open_activity_doc = open_activity_collect.find({}, {'_id': False})
resp = {
'status': '200',
'result': list(open_activity_doc),
'msg': '取得活動資訊成功'
}
return jsonify(resp)
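# Illustrative requests against the routes above (host and port assumed from
# a default `flask run`; query values are placeholders):
#   curl "http://localhost:5000/api/android/getRemind"
#   curl "http://localhost:5000/api/android/getWeather?city=台北"
#   curl "http://localhost:5000/api/android/getHospital?hospital=<hospital-name>"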
# ==== next file: python ====
from pathlib import Path
from typing import Tuple, Callable, Union
ReadPaths = Union[Tuple[Path], Tuple[Path, Path]]
def _make_paired_paths(
dir_path: Path,
paired: bool,
mkstr: Callable[[int], str]
) -> ReadPaths:
path1 = dir_path/mkstr(1)
return (path1, dir_path/mkstr(2)) if paired else (path1,)
def make_read_paths(
reads_dir_path: Path,
paired: bool
) -> ReadPaths:
return _make_paired_paths(reads_dir_path, paired, lambda n: f"reads_{n}.fq.gz")
def make_legacy_read_paths(
reads_dir_path: Path,
paired: bool
) -> ReadPaths:
return _make_paired_paths(reads_dir_path, paired, lambda n: f"reads_{n}.fastq")
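# Illustrative results (directory path hypothetical):
#   make_read_paths(Path("/data/run1"), paired=True)
#     -> (Path('/data/run1/reads_1.fq.gz'), Path('/data/run1/reads_2.fq.gz'))
#   make_legacy_read_paths(Path("/data/run1"), paired=False)
#     -> (Path('/data/run1/reads_1.fastq'),)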
# ==== next file: python ====
import json
from lib import action
class VaultWriteAction(action.VaultBaseAction):
def run(self, path, values):
return self.vault.write(path, **json.loads(values))
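# Hedged usage note (action alias and secrets illustrative): in a StackStorm
# pack, `values` arrives as a JSON string and is expanded into keyword
# arguments for the Vault client's write call, e.g.
#   st2 run vault.write path=secret/myapp values='{"user": "x", "token": "y"}'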
# ==== next file: python ====
import numpy as np
import operator
def TI_Forward_Neighborhood(D, p, Eps):
""" ."""
seeds = []
forwardThreshold = p.dist + Eps
    # Traverse the list starting just after "p": later points have larger
    # reference distances, so once q.dist exceeds the forward threshold no
    # later point can be an Eps-neighbor of "p".
indice = D.index(p)
points_list = D[indice + 1:]
# The newly calculated list is traversed
for q in points_list:
if q.dist > forwardThreshold:
break
if Distance(q.Coords, p.Coords) <= Eps:
seeds.append(q)
# The list with the seeds is returned.
return seeds
def TI_Backward_Neighborhood(D, pto, Eps):
seeds = []
backwardThreshold = pto.dist - Eps
    # Traverse the list backwards from "pto": earlier points have smaller
    # reference distances, so once q.dist drops below the backward threshold
    # no earlier point can be an Eps-neighbor of "pto".
indice = D.index(pto)
points_list = D[:indice]
points_list.reverse()
# The newly calculated list is traversed
for q in points_list:
if q.dist < backwardThreshold:
break
if Distance(q.Coords, pto.Coords) <= Eps:
seeds.append(q)
# The list with the seeds is returned.
return seeds
def TI_Neighborhood(D, p, Eps):
part_1 = TI_Backward_Neighborhood(D, p, Eps)
part_2 = TI_Forward_Neighborhood(D, p, Eps)
return part_1 + part_2
def TI_ExpandCluster(D, D_prim,
p, ClId, Eps, MinPts):
"""D is increasingly ordered with respect to the
distances from the reference point"""
# The set of points around point "p" is explored. Note that
# seeds is a set or list of points.
seeds = TI_Neighborhood(D, p, Eps)
# Points around "p" are counted, including itself
p.NeighborsNo += len(seeds)
# "p" can be noise or an edge point
if p.NeighborsNo < MinPts:
# It is initially declared as noise
p.ClusterId = -1 # "NOISE"
        # Each point in the seed set is visited
for q in seeds:
q.Border.append(p)
q.NeighborsNo += 1
# The list of edge points of "p" is declared empty
p.Border = []
# "P" is removed from D to D_prim
D.remove(p)
D_prim.append(p)
return False
else:
# Cluster membership is assigned
p.ClusterId = ClId
# The points found in the seeds are covered
for q in seeds:
q.ClusterId = ClId
q.NeighborsNo += 1
for q in p.Border:
# Identify which element is in the D_prim listing, and
# then modify this.
D_prim[D_prim.index(q)].ClusterId = ClId
# Once again the set is emptied
p.Border = []
# "P" is removed from D to D_prim
D.remove(p)
D_prim.append(p)
        # Keep expanding while the seed list still holds at least one
        # pending point:
while seeds:
            # The same expansion step is repeated for each pending seed
curPoint = seeds[0]
curSeeds = TI_Neighborhood(D, curPoint, Eps)
curPoint.NeighborsNo += len(curSeeds)
            # If curPoint is on the edge
if curPoint.NeighborsNo < MinPts:
for q in curSeeds:
q.NeighborsNo += 1
# If curPoint is core
else:
while curSeeds:
q = curSeeds[0]
q.NeighborsNo += 1
if q.ClusterId == "UNCLASSIFIED":
q.ClusterId = ClId
# Remove "p" from D to
# D_prim
curSeeds.remove(q)
seeds.append(q)
else:
curSeeds.remove(q)
# The edge points are traversed
for q in curPoint.Border:
# Identify which element is in the
# listing D_prim, and then this is modified.
D_prim[D_prim.index(q)].ClusterId = ClId
# The content of the variables is modified
curPoint.Border = []
D.remove(curPoint)
D_prim.append(curPoint)
seeds.remove(curPoint)
# The logical value is returned.
return True
def Distance(point, pnt_ref):
"""Function that calculates the distance in two dimensions"""
point = np.array(point[0:2])
pnt_ref = np.array(pnt_ref[0:2])
return np.sqrt(np.sum(np.power(point - pnt_ref, 2)))
class class_point:
"""Class that generates a point with its attributes"""
def __init__(self, point, pnt_ref, metadata=None):
try:
# Metadata
self.metadata = metadata
# The original coordinates are saved
self.Coords = point[0:2]
        except (IndexError, TypeError):
            pass
# p.ClusterId = UNCLASSIFIED;
self.ClusterId = "UNCLASSIFIED"
# p.dist = Distance(p,r)
self.dist = Distance(point[0:2], pnt_ref[0:2])
# p.NeighborsNo = 1
self.NeighborsNo = 1
# p.Border = vacio
self.Border = []
def TI_DBScan(D, eps, MinPts, metadata=None):
"""This class applies the TI-DBScan algorithm to the set
of points delivered.
D = [[coord1, coord2, ...], ...]:
It is a list of tuples or lists, where the two
first items in each list are the coordinates and
the third is METAdata."""
try:
# /* assert: r denotes a reference point */
pnt_ref = D[0]
    except IndexError:
        return []  # an empty input yields an empty clustering
# the number of points cannot be 1.
MinPts = MinPts if MinPts > 1 else 2
# D' = empty set of points;
D_prim = []
#Points are transformed
try:
D = [class_point(
D[indice], pnt_ref, metadata=metadata[indice])
for indice in range(len(D))]
except TypeError:
D = [class_point(
D[indice], pnt_ref)
for indice in range(len(D))]
# sort all points in D non-decreasingly w.r.t. field dist;
    D = sorted(D, key=operator.attrgetter('dist'))
# ClusterId = label of first cluster;
i = 0
ClusterId = i #"%s" % (
# for each point p in the ordered set D starting from
# the first point until last point in D do
# While the list of points to review is not empty, it iterates
# infinitely.
while D:
p = D[0]
#for p in D:
# if TI-ExpandCluster(D, D', p, ClusterId, Eps, MinPts) then
if TI_ExpandCluster(D, D_prim,
p, ClusterId, eps, MinPts):
# ClusterId = NextId(ClusterId)
i += 1
ClusterId = i #"%s" % (i)
# endif
# endfor
# return D'// D' is a clustered set of points
return D_prim
# The next line is for testing
if __name__ == "__main__":
set_of_points = [[1.00, 1.00], [1.50, 1.00], [2.00, 1.50],
[5.00, 5.00], [6.00, 5.50], [5.50, 6.00],
[10.00, 11.00], [10.50, 9.50], [10.00, 10.00],
[8.00, 1.00], [1.00, 8.00]]
#set_of_points = [[1.00, 1.00], [1.50, 1.00], [2.00, 1.50],
# [5.00, 5.00], [6.00, 5.50], [5.50, 6.00],
# [8.00, 1.00], [1.00, 8.00]]
#set_of_points = [[1.00, 1.00], [1.50, 1.00], [2.00, 1.50],
# [5.00, 5.00], [8.00, 1.00], [1.00, 8.00]]
result = TI_DBScan(set_of_points, 2, 2)
for element in result:
        print(element.ClusterId)
        print(element.Coords)
        print("")
# ==== next file: python ====
import time
import os
from coala_utils.decorators import enforce_signature
from coalib.output.printers.LogPrinter import LogPrinterMixin
from coalib.misc.CachingUtilities import (
pickle_load, pickle_dump, delete_files)
class FileCache:
"""
This object is a file cache that helps in collecting only the changed
and new files since the last run. Example/Tutorial:
>>> from pyprint.NullPrinter import NullPrinter
>>> from coalib.output.printers.LogPrinter import LogPrinter
>>> import logging
>>> import copy, time
>>> log_printer = LogPrinter()
>>> log_printer.log_level = logging.CRITICAL
To initialize the cache create an instance for the project:
>>> cache = FileCache(log_printer, "test", flush_cache=True)
Now we can track new files by running:
>>> cache.track_files(["a.c", "b.c"])
Since all cache operations are lazy (for performance), we need to
explicitly write the cache to disk for persistence in future uses:
(Note: The cache will automatically figure out the write location)
>>> cache.write()
Let's go into the future:
>>> time.sleep(1)
Let's create a new instance to simulate a separate run:
>>> cache = FileCache(log_printer, "test", flush_cache=False)
>>> old_data = copy.deepcopy(cache.data)
We can mark a file as changed by doing:
>>> cache.untrack_files({"a.c"})
Again write to disk after calculating the new cache times for each file:
>>> cache.write()
>>> new_data = cache.data
Since we marked 'a.c' as a changed file:
>>> "a.c" not in cache.data
True
>>> "a.c" in old_data
True
Since 'b.c' was untouched after the second run, its time was updated
to the latest value:
>>> old_data["b.c"] < new_data["b.c"]
True
"""
@enforce_signature
def __init__(
self,
log_printer: LogPrinterMixin,
project_dir: str,
flush_cache: bool=False):
"""
Initialize FileCache.
:param log_printer: An object to use for logging.
:param project_dir: The root directory of the project to be used
as a key identifier.
:param flush_cache: Flush the cache and rebuild it.
"""
self.log_printer = log_printer
self.project_dir = project_dir
self.current_time = int(time.time())
cache_data = pickle_load(log_printer, project_dir, {})
last_time = -1
if 'time' in cache_data:
last_time = cache_data['time']
if not flush_cache and last_time > self.current_time:
log_printer.warn('It seems like you went back in time - your '
'system time is behind the last recorded run '
'time on this project. The cache will '
'be force flushed.')
flush_cache = True
self.data = cache_data.get('files', {})
if flush_cache:
self.flush_cache()
# store the files to be untracked and then untrack them in the end
# so that an untracked file is not tracked again by mistake in a
# later section (which will happen if that file doesn't yield a
# result in that section).
self.to_untrack = set()
def flush_cache(self):
"""
Flushes the cache and deletes the relevant file.
"""
self.data = {}
delete_files(self.log_printer, [self.project_dir])
self.log_printer.debug('The file cache was successfully flushed.')
def __enter__(self):
return self
def write(self):
"""
Update the last run time on the project for each file
to the current time. Using this object as a contextmanager is
preferred (that will automatically call this method on exit).
"""
for file in self.to_untrack:
if file in self.data:
del self.data[file]
for file_name in self.data:
self.data[file_name] = self.current_time
pickle_dump(
self.log_printer,
self.project_dir,
{'time': self.current_time, 'files': self.data})
def __exit__(self, type, value, traceback):
"""
Update the last run time on the project for each file
to the current time.
"""
self.write()
def untrack_files(self, files):
"""
Removes the given files from the cache so that they are no longer
considered cached for this and the next run.
:param files: A set of files to remove from cache.
"""
self.to_untrack.update(files)
def track_files(self, files):
"""
Start tracking files given in ``files`` by adding them to the
database.
:param files: A set of files that need to be tracked.
These files are initialized with their last
modified tag as -1.
"""
for file in files:
if file not in self.data:
self.data[file] = -1
def get_uncached_files(self, files):
"""
Returns the set of files that are not in the cache yet or have been
untracked.
:param files: The list of collected files.
:return: A set of files that are uncached.
"""
if self.data == {}:
# The first run on this project. So all files are new
# and must be returned irrespective of whether caching is turned on.
return files
else:
return {file
for file in files
if (file not in self.data or
int(os.path.getmtime(file)) > self.data[file])}
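# Usage sketch (names illustrative): the class implements the context-manager
# protocol, so write() is called automatically on exit.
#
#     with FileCache(log_printer, "my_project") as cache:
#         changed_files = cache.get_uncached_files(collected_files)
#         cache.track_files(changed_files)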
# ==== next file: python ====
from flask import Flask
from flask import render_template, url_for, request
import datetime
from person import Person
from my_lib import get_week
from parser_price import Parser_price
from head_hunter_vacancies import HeadHunter_vacancies
from bd_apartment import Appartment_BD
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, View_apartment, Country, Region, City, Address, Characteristic
pers = Person()
app = Flask( __name__ )
engine = create_engine('sqlite:///apartment.db?check_same_thread=False')
Base.metadata.bind = engine
DBSession = sessionmaker( bind=engine )
session = DBSession()
@app.route("/")
@app.route("/index/")
def main_win(): # Main page
today = datetime.datetime.today()
scw = get_week( int( today.strftime('%w') ) )
return render_template( 'index.html', curdate = today.strftime('%d-%m-%Y'), curweek = scw )
@app.route("/personal/")
def pers_win(): # Personal data -------------------------------------------------------
dic={
'photo' : pers.get_photo(),
'fio' : pers.get_name() + ' ' + pers.get_otch() + ' ' + pers.get_fam(),
'birthday' : pers.get_birthday(),
'attach': pers.get_attach()
}
return render_template( 'personal.html', **dic )
@app.route("/parser/" )
def parser(): # apartment parser start page - district selection ---------------------------
return render_template( 'parser_form.html' )
@app.route("/price_apartments/", methods=['POST'] )
def price(): # parser results - prices
    region = request.form['region'] # get the form parameter
    parser = Parser_price( region ) # create the parsing object
    dicMin = parser.cost_min(rej='dic')
    dicMax = parser.cost_max(rej='dic')
    # parameters for the page
    dic={}
dic['region'] = region
dic['minprice'] = dicMin['price']
dic['mincity'] = dicMin['city']
dic['mincharact'] = dicMin['address']+'; '+dicMin['region']+'; '+dicMin['characteristic']
dic['maxprice'] = dicMax['price']
dic['maxcity'] = dicMax['city']
dic['maxcharact'] = dicMax['address'] + '; ' + dicMax['region'] + '; ' + dicMax['characteristic']
return render_template( 'price_apartments.html', **dic )
@app.route("/hh_main/" )
def hh_main(): # vacancies API start page ---------------------------------------------
return render_template('hh_city.html')
@app.route("/hh_vacancy/", methods=['POST'] )
def hh_vacancy():
    city = request.form['city'] # which city was selected
vac = request.form['vac']
hh = HeadHunter_vacancies()
    lst, num, total = hh.view_vacancies( city, vac )
dic={}
s = ''
for v in lst:
if v:
s += '* '+v+'\n'
dic['skills'] = s
dic['city'] = city
dic['vac'] = vac
if num == 0:
dic['salary'] = 0.0
else:
        dic['salary'] = round( total/num, 2 )
return render_template('hh_vacancy.html', **dic)
@app.route("/bd_apartment/" )
def bd_apartment():
return render_template('bd_apartment.html')
@app.route("/bd_apartment_view/", methods=['POST'] )
def bd_apartment_view():
    region = request.form['region'] # get the form parameter
    load = request.form.get('load') # get the form parameter
dic = {}
bd = Appartment_BD( )
dic['field'] = []
if bd.is_connect == 'OK':
        lstField = bd.get_title_table() # list of tuples (records) with the fields inside
dic['field']=lstField
    # database data
    # overwrite if requested
    if load:
        parser = Parser_price( ) # create the parsing object for the district
lst_data = parser.data_search( region )
bd.save_data( lst_data )
lst_view_data, update = bd.get_data( region )
dic['data'] = lst_view_data
dic['region'] = region
dic['update'] = update
return render_template('bd_apartment_view.html', **dic)
#----------------------------------------------------------------------------------------- SQLAlchemy
def get_field():
    #print('============ View fields =======================')
lst_field = session.query(View_apartment).all()
lst=[]
for field in lst_field:
#print(field.colname, field.coltitle, sep=', ')
lst.append( {'npp':field.npp, 'name':field.colname, 'title':field.coltitle} )
    lst = sorted( lst, key=lambda x: x['npp'] ) # sort by order number
return lst
def get_country():
query = session.query(City, Country)
query = query.join(City, City.id_country == Country.id)
records = query.all()
lst=[]
for city, country in records:
#print(city, country, sep=', ')
lst.append( {'city':city, 'country':country} )
return lst
def get_region( name, session ):
reg = session.query(Region).filter( Region.name == name).first()
lst=[]
if reg:
lst = [reg.id, reg.name, reg.id_city, reg.date]
return lst
def get_data( region, session ):
reg = get_region( region, session )
records = []
upd=''
if reg:
id_reg = reg[0]
upd = reg[3]
query = session.query( Characteristic, Address ).filter( Address.id_region == id_reg )
query = query.join( Address, Address.id == Characteristic.id_address )
records = query.all()
# for char, address in records:
# print( address, char, sep='; ')
return (records, upd)
def save_data( lst:list, session ):
s1 = lst[0]['region'].replace( ',',' ' )
reg = ' '.join( s1.split(' ')[0:2] )
id_reg = set_region( reg, session )
for v in lst:
id_addr = set_address( id_reg, v['address'], session )
id_charact = set_characteristic( id_addr, v['characteristic'], session)
def set_region( name:str, session:DBSession ):
reg = get_region( name, session )
id_reg = None
if reg:
id_reg = reg[0]
    else: # no such district yet
today = datetime.datetime.today().strftime('%d-%m-%Y')
reg = Region( name=name, id_city=1, date=today)
session.add( reg )
session.commit()
reg = get_region( name, session )
if reg:
id_reg = reg[0]
return id_reg
def get_address( name, id_city, session ):
adr = session.query(Address).filter( Address.name == name).first()
lst=[]
if adr:
lst = [adr.id, adr.name, adr.id_city, adr.id_region]
return lst
def set_address( id_reg, name, session:DBSession ):
adr = get_address( name, 1, session )
id_adr = None
if adr:
id_adr = adr[0]
    else: # no such address yet
adr = Address( name=name, id_city=1, id_region=id_reg )
session.add(adr)
session.commit()
adr = get_address(name, 1, session )
if adr:
id_adr = adr[0]
return id_adr
def get_characteristic( id_addr, session ):
ch = session.query(Characteristic).filter( Characteristic.id_address == id_addr).first()
lst=[]
if ch:
lst = [ch.id, ch.data]
return lst
def set_characteristic( id_addr, data, session ):
ch = get_characteristic( id_addr, session)
id_ch = None
    if ch:
        id_ch = ch[0]
    else: # no such characteristic yet
ch = Characteristic( data=data, id_address=id_addr )
session.add( ch )
session.commit()
ch = get_characteristic( id_addr, session )
if ch:
id_ch = ch[0]
return id_ch
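# Hedged generalization (illustrative, not used by the routes below): the
# set_* helpers above all follow the same get-or-create shape, which could be
# written once as:
def get_or_create( model, session, defaults=None, **filters ):
    obj = session.query( model ).filter_by( **filters ).first()
    if obj is None:
        obj = model( **{**filters, **(defaults or {})} )
        session.add( obj )
        session.commit()
    return obj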
@app.route("/sqlalchemy_apartment/")
def sqlalchemy_apartment():
return render_template('sqlalchemy_apartment.html')
@app.route("/sqlalchemy_apartment_view/", methods=['POST'])
def sqlalchemy_apartment_view():
    region = request.form['region'] # get the form parameter
    load = request.form.get('load') # get the form parameter
dic = {}
dic['region'] = region
dic['field'] = []
lstField = get_field()
lst = []
for v in lstField:
lst.append(v['title'])
dic['field'] = lst
    # database data
    # overwrite if requested
    if load:
        parser = Parser_price() # create the parsing object for the district
lst_data = parser.data_search(region)
save_data( lst_data, session )
#print( bd.get_id_region( region ) )
lst_view_data, update = get_data( region, session )
dic['data'] = lst_view_data
dic['region'] = region
dic['update'] = update
return render_template('sqlalchemy_apartment_view.html', **dic)
# ********************************************************************
if __name__ == "__main__":
    #print( 'version:', flask.__version__ )
app.run( debug=True )
#Thread(target=app.polling, args=()).start()
# ==== next file: python ====
#from __future__ import absolute_import
from celery import shared_task
#from celery.contrib import rdb #DEBUG
@shared_task
def myflqTaskRequest(analysisID):
from django.conf import settings
from myflq.models import Analysis,AnalysisResults
from django.core.files import File
import subprocess,time,tempfile
#rdb.set_trace() #DEBUG => telnet 127.0.0.1 portnumber
analysis = Analysis.objects.get(id=analysisID)
analysis.progress = 'P'
analysis.save()
tempfigure = tempfile.NamedTemporaryFile(delete=False,suffix='.png')
tempxml = tempfile.NamedTemporaryFile(delete=False,suffix='.xml')
tempfigure.close(), tempxml.close() #Only their filenames need to be passed to the subprocess
command = ['python3','../MyFLq.py', '-p', analysis.configuration.user.password,
'analysis', '--sampleName', analysis.originalFilename,
'--negativeReadsFilter' if analysis.negativeReadsFilter else 'REMOVE',
'--primerBuffer', str(analysis.primerBuffer),
'--kMerAssign' if analysis.kMerAssign else 'REMOVE',
str(analysis.kMerAssign) if analysis.kMerAssign else 'REMOVE',
'--flankOut' if analysis.flankOut else 'REMOVE',
'--stutterBuffer', str(analysis.stutterBuffer),
'--useCompress' if analysis.useCompress else 'REMOVE',
'--withAlignment' if analysis.withAlignment else 'REMOVE',
'--threshold', str(analysis.threshold),
'--clusterInfo' if analysis.clusterInfo else 'REMOVE',
'--randomSubset' if analysis.randomSubset else 'REMOVE',
str(analysis.randomSubset) if analysis.randomSubset else 'REMOVE',
'-r',tempxml.name,'-s', settings.STATIC_URL+'css/resultMyFLq.xsl','-v',tempfigure.name,
analysis.fastq.file.name, analysis.configuration.dbusername(),
analysis.configuration.fulldbname(), 'default']
while 'REMOVE' in command: command.remove('REMOVE')
try:
subprocess.check_output(command,stderr=subprocess.STDOUT)
analysisResult = AnalysisResults(analysis=analysis)
analysisResult.xmlFile.save(tempxml.name,File(open(tempxml.name)))
analysisResult.figFile.save(tempfigure.name,File(open(tempfigure.name,'rb')))
analysisResult.save()
analysis.progress = 'F'
analysis.save()
except subprocess.CalledProcessError as e:
analysis.progress = 'FA'
analysis.save()
print('FAILURE:',e.output.decode())
import os
os.remove(tempxml.name), os.remove(tempfigure.name)
print('Command:\n',' '.join(command))
return 'Executed:\n'+' '.join(command)
@shared_task
def alleleTaskRequest(sequence):
"""
Retrieves the sequence identifier on ENA.
Submits an entry if not already available.
"""
from urllib.request import urlopen
from time import sleep
#urlopen("http://www.ebi.ac.uk/ena/search/showQueryCollections?type=exact") #DEBUG see collection ids
# 20 Human -----Human (EMBL-Bank)
#Submit search for sequence #TODO make work with &type=exact => mail ENA
response = urlopen('http://www.ebi.ac.uk/ena/search/executeSearch?Sequence={seq}&collection_id=20'.format(seq=sequence))
response = response.read().decode().strip()
#Wait for result completion
status = urlopen(response).read().decode()
while not status.startswith('COMPLETE'):
sleep(30)
status = urlopen(response).read().decode()
totalResults = int(status.strip().split('\t')[-1])
#See if there is a full identity match (check first only 10 results)
resultsQuery = response.replace('Status','Results')+'&fields=accession,identity,e_value&offset={offset}&length=10'
for i in range(0,totalResults,10):
results = urlopen(resultsQuery.format(offset=i))
        results = results.read().decode().strip()  # read the body of this results request
if '\t100\t' in results: break
if '\t100\t' in results:
for result in results.split('\r\n'):
result = result.split('\t')
if result[1] == '100': return result[0] #result[0] is the accession id
#If not returned then sequence has to be submitted
enasubmit = 'https://www-test.ebi.ac.uk/ena/submit/drop-box/submit/'
#https://www.ebi.ac.uk/ena/submit/drop-box/submit/ #TODO for production
# ==== next file: python ====
import json
from importlib import resources
import requests
import explorer
from explorer import configs
from explorer.enums.fields_enum import FieldsEnum as fields
from explorer.utils.parsing import ResponseParser as parser
class BlockchainExplorer:
def __new__(cls, api_key: str, net: str, prefix: str):
with resources.path(configs, f"{net.upper()}-stable.json") as path:
config_path = str(path)
return cls.from_config(api_key=api_key, config_path=config_path, net=net, prefix=prefix)
@staticmethod
def __load_config(config_path: str) -> dict:
with open(config_path, "r") as f:
return json.load(f)
@staticmethod
def __run(func, api_key: str, net: str, prefix: str):
def wrapper(*args, **kwargs):
url = (
f"{prefix.format(net.lower()).replace('-main','')}"
f"{func(*args, **kwargs)}"
f"{fields.API_KEY}"
f"{api_key}"
)
r = requests.get(url, headers={"User-Agent": ""})
return parser.parse(r)
return wrapper
@classmethod
def from_config(cls, api_key: str, config_path: str, net: str, prefix: str):
config = cls.__load_config(config_path)
for func, v in config.items():
if not func.startswith("_"): # disabled if _
attr = getattr(getattr(explorer, v["module"]), func)
setattr(cls, func, cls.__run(attr, api_key, net, prefix))
return cls
class Etherscan(BlockchainExplorer):
def __new__(cls, api_key: str, net: str = "MAIN"):
return BlockchainExplorer(api_key, net, prefix=fields.PREFIX)
class Arbiscan(BlockchainExplorer):
def __new__(cls, api_key: str, net: str = "MAIN"):
return BlockchainExplorer(api_key, net, prefix="https://api.arbiscan.io/api?")
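# Hedged usage sketch: endpoint methods are attached dynamically from the
# JSON config, so the available names depend on the modules it lists (the
# call below is illustrative and needs a real API key):
#   eth = Etherscan("YOUR_API_KEY")
#   balance = eth.get_eth_balance(address="0x...")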
# ==== next file: python ====
import feedparser
import justext
import requests
import sys
from database import Database
from bs4 import BeautifulSoup
import re
import mistune
from unidecode import unidecode
def get_text_from_reuters(link):
response = requests.get(link)
resText = response.content.decode("UTF-8", 'ignore')
soup = BeautifulSoup(resText, 'html.parser')
tmp = [x.extract() for x in soup.find_all(class_= "Edition_items_293of")]
for tag in soup.find_all(["script", "meta", "head", "style", "noscript"]):
tag.decompose()
for tag in soup.find_all(True, class_= ["Attribution_content_27_rw", "Image_container_1tVQo"]):
tag.decompose()
paragraphs = justext.justext(soup.prettify(), justext.get_stoplist("English"))
text = "\n\n".join([p.text for p in paragraphs if not p.is_boilerplate])
return text
def get_text_from_cnn(link):
response = requests.get(link)
soup = BeautifulSoup(response.content, 'lxml')
for tag in soup.find_all(["script","img", "meta", "head", "style", "noscript", "h3", "h4"]):
tag.decompose()
for tag in soup.find_all(class_= ["video__end-slate__top-wrapper", "cd__headline", "el__storyelement--standard","el__article--embed", "zn-body__read-more", "el__leafmedia","el__leafmedia--storyhighlights", "zn-body__footer", "el__embedded--standard", "el__storyelement__title", "media__caption"]):
tag.decompose()
title = soup.find("h1", class_ = "pg-headline")
content = soup.find("section", id = "body-text")
return "{}\n\n{}".format(title.get_text(), content.get_text())
def get_text_from_wikipedia(link):
markdown = mistune.Markdown()
response = requests.get(link)
unaccented_string = unidecode(str(response.content)).replace("\\n", " ")
html = unaccented_string
html = markdown(html)
soup = BeautifulSoup(html, 'lxml')
title = soup.find(id = "firstHeading")
content = soup.find("div", class_ = "mw-parser-output")
to_remove = content.find(id = "External_links")
to_remove = content.find(id = "Notes") if content.find(id = "Notes") is not None else to_remove
to_remove = content.find(id = "See_also") if content.find(id = "See_also") is not None else to_remove
to_remove = content.find(id = "Gallery") if content.find(id = "Gallery") is not None else to_remove
to_remove = content.find(id = "Selected_bibliography") if content.find(id = "Selected_bibliography") is not None else to_remove
if to_remove is not None:
parent = list(to_remove.parents)[0]
for tag in parent.find_next_siblings():
tag.decompose()
for tag in content.find_all(["small", "math", "table", "h2", "sup"]):
tag.decompose()
for tag in content.find_all(True, id = ["toc"]):
tag.decompose()
for tag in content.find_all(True, class_ =["mw-headline","IPA","mw-editsection", "quotebox", "infobox", "vertical-navbox", "navbox", "reference", "reflist", "thumb"]):
tag.decompose()
for tag in content.find_all(True, role = "note"):
tag.decompose()
# paren_reg = re.compile("/\(([^()]+)\)/g")
# out = paren_reg.sub('', content.get_text())
out = content.get_text().replace("\\", "")
out = out.replace("'", "")
out = out.replace(";", "")
return "{}\n\n{}".format(title.get_text(), out)
def collect(url, source):
    """Download every entry of the RSS feed at `url`, extracting article text
    with the given `source` function (e.g. get_text_from_reuters,
    get_text_from_cnn or get_text_from_wikipedia)."""
    d = feedparser.parse(url)
    texts = {}
    for entry in d["entries"]:
        link = entry["link"]
        print("downloading: " + link)
        text = source(link)
        texts[link] = text
    return texts
|
python
|
from typing import Dict, Type
from .setting import Setting
class SettingRegistry:
def __init__(self):
self._registry: Dict[Type, Type[Setting]] = {}
def register_setting(self, type_hint: Type, setting_cls: Type[Setting]):
self._registry[type_hint] = setting_cls
def get_setting_class_for_type(self, type_hint: Type) -> Type[Setting]:
return self._registry.get(type_hint, Setting)
registry = SettingRegistry()
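# Minimal usage sketch: `IntSetting` below is hypothetical, purely to
# illustrate the registry API.
#
#   class IntSetting(Setting):
#       pass
#
#   registry.register_setting(int, IntSetting)
#   assert registry.get_setting_class_for_type(int) is IntSetting
#   assert registry.get_setting_class_for_type(str) is Setting  # fallback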
|
python
|
import boto3
from botocore.exceptions import ClientError
from .config import s3, bucket_name
import logging
log = logging.getLogger("my-logger")
def generate_presigned_url(s3_client, client_method, method_parameters, expires_in):
"""
Generating a presigned Amazon S3 URL that can be used to perform an action.
"""
try:
url = s3_client.generate_presigned_url(
ClientMethod=client_method,
Params=method_parameters,
ExpiresIn=expires_in
)
log.info("Got presigned URL")
except ClientError:
log.info(
f"Couldn't get a presigned URL for client method {client_method}")
raise
return url
def upload_file(obj):
client_action = 'put_object'
file_path = obj.file_name
url = generate_presigned_url(
s3, client_action, {'Bucket': bucket_name, 'Key': file_path}, 3600)
return {"presigned_url": url, "filename": obj.file_name}
|
python
|
#!/usr/bin/env pytest-3
# -*- coding: utf-8 -*-
#
# This file is part of the minifold project.
# https://github.com/nokia/minifold
__author__ = "Marc-Olivier Buob"
__maintainer__ = "Marc-Olivier Buob"
__email__ = "[email protected]"
__copyright__ = "Copyright (C) 2018, Nokia"
__license__ = "BSD-3"
from minifold.doc_type import DocType
def test_sort():
hdr = DocType.HDR
book = DocType.BOOKS_AND_THESES
assert sorted([hdr, book]) == [book, hdr]
def test_dblp():
from minifold.dblp import DblpConnector
for s in [
"conference and workshop papers",
"conference or workshop",
"journal articles",
"informal publications",
"books and theses",
"editorship"
]:
assert DblpConnector.to_doc_type(s) != DocType.UNKNOWN
def test_hal():
from minifold.hal import HalConnector
for s in ["art", "comm", "report", "hdr", "couv", "patent"]:
assert HalConnector.to_doc_type(s) != DocType.UNKNOWN
|
python
|
import numpy as np
import matplotlib.pyplot as plt
#Compute e^x for each value in the list
def e_function(my_list):
return [np.exp(val) for val in my_list]
#Plot the function
def plot_func(x, y, farbe, windowName):
plt.figure(windowName)
plt.plot(x, y, color=farbe)
plt.title("My Image")
plt.xlabel("x")
plt.ylabel("e(x)")
plt.show()
a = 1
b = 5
mylist = np.array(range(a, b + 1), dtype=np.int8)
e_list = e_function(mylist)
plot_func(mylist, e_list, "black", "MyWindowName")
|
python
|
from recon.core.module import BaseModule
import re
class Module(BaseModule):
meta = {
'name': 'Shodan Hostname Enumerator',
'author': 'Tim Tomes (@LaNMaSteR53)',
'description': 'Harvests hosts from the Shodan API by using the \'hostname\' search operator. Updates the \'hosts\' table with the results.',
'required_keys': ['shodan_api'],
'query': 'SELECT DISTINCT domain FROM domains WHERE domain IS NOT NULL',
'options': (
('limit', 1, True, 'limit number of api requests per input source (0 = unlimited)'),
),
}
def module_run(self, domains):
limit = self.options['limit']
for domain in domains:
self.heading(domain, level=0)
query = 'hostname:%s' % (domain)
results = self.search_shodan_api(query, limit)
for host in results:
address = host['ip_str']
port = host['port']
if not host['hostnames']:
host['hostnames'] = [None]
for hostname in host['hostnames']:
self.add_ports(ip_address=address, port=port, host=hostname)
self.add_hosts(host=hostname, ip_address=address)
|
python
|
from .imports import *
def bn_drop_lin(inp, n_out, bn=True, p=0., actn=None):
out = inp
if bn:
out = BatchNormalization()(out)
if p>0:
out = Dropout(p)(out)
    use_bias = not bn  # BatchNorm's learned shift makes a separate bias redundant
out = Dense(n_out, activation=actn, use_bias=use_bias)(out)
return out
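# Usage sketch (assumes `Input` and `Model` are re-exported by .imports,
# Keras-style): stack two blocks into a small classification head.
#
#   inp = Input(shape=(512,))
#   x = bn_drop_lin(inp, 256, bn=True, p=0.25, actn="relu")
#   out = bn_drop_lin(x, 10, bn=True, p=0.5, actn="softmax")
#   model = Model(inp, out)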
|
python
|
# -*- coding: utf-8 -*-
highestNumber = -1
highestNumberPosition = -1
for i in range(100):
number = int(input())
if number > highestNumber:
highestNumber = number
highestNumberPosition = i + 1
print(highestNumber)
print(highestNumberPosition)
|
python
|
import requests, re
def fbvid(url):
req=requests.get(url)
if req.status_code == 200:
try:
return {"status":True,"url":re.search('hd_src:"(.+?)"', req.text)[1]}
except TypeError:
return {"status":False,"msg":"private_video"}
else:
return {"status":False,"msg":"Link Wrong"}
|
python
|
import pandas as pd
import numpy as np
import datetime
import numba
import time
import os
def cummul(array):
temp = 1
ret = []
for element in array:
temp *= element
ret.append(temp)
return np.array(ret)
def listdirs(folder):
return [
d for d in (os.path.join(folder, d1) for d1 in os.listdir(folder))
if os.path.isdir(d)
]
def split_list(alist, wanted_parts=1, slip=2):
length = len(alist)
return [ alist[int(np.clip(i*length// wanted_parts-slip, 0, np.inf)): (i+1)*length // wanted_parts]
for i in range(wanted_parts) ]
def to_timestamp(timestamp, pattern="%Y-%m-%d %H:%M:%S"):
return np.array([time.mktime(datetime.datetime.strptime(t, pattern).timetuple()) for t in
timestamp.tolist()], dtype=np.float64)
def to_datetime(timestamp):
return [datetime.datetime.fromtimestamp(x) for x in timestamp]
def sign(a):
return (a > 0) - (a < 0)
def fillna_arr(array, **kwargs):
df = pd.DataFrame({"v": array})
df = df.fillna(**kwargs)
return df["v"].values
def rolling_window(observations, n, func=lambda x: x):
ret = []
for i, data in enumerate(observations[n - 1:]):
strip = func(observations[i:i + n])
ret.append(strip)
return np.array(ret)
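# Example: a moving mean over windows of 3 observations,
#   rolling_window(np.array([1, 2, 3, 4]), 3, func=np.mean)
# returns array([2., 3.]) -- one value per complete window.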
def get_rolling_window_size(timestamp_lst, period):
return len(timestamp_lst[np.where(timestamp_lst <= timestamp_lst[0] + period)[0]])
def filter_array(func, array):
mask = func(array)
index = np.where(mask)[-1]
return array[index]
def append_dict(d, d1):
for key, value in d1.items():
d[key] = value
class LabeledScalarStorage:
def __init__(self, *args):
self.value = {}
for a in args:
self.add_label(a)
def __add__(self, other):
for key, value in other.value.items():
self.value[key] = value
return self
def __getitem__(self, item):
return self.value[item]
def __setitem__(self, key, value):
self.value[key] = value
def keys(self):
return self.value.keys()
def values(self):
return self.value.values()
def items(self):
return self.value.items()
def add_label(self, label):
self.value[label] = []
def add_scalar(self, label, scalar):
self.value[label].append(scalar)
def extend(self, other):
for key in self.value.keys():
self[key].extend(other[key])
@property
def dataframe(self):
return pd.DataFrame(self.value)
class StructureDataset:
def __init__(self):
self.value = {}
def __getitem__(self, item):
return self.value[item]
    def __setitem__(self, x, y):
        # use .get() so assigning to a brand-new key works instead of raising KeyError
        if isinstance(self.value.get(x), StructureDataset):
            raise ValueError("can't __setitem__ to group")
        self.value[x] = y
def keys(self):
return self.value.keys()
def values(self):
return self.value.values()
def items(self):
return self.value.items()
def create_group(self, name):
self.value[name] = StructureDataset()
return self.value[name]
def create_dataset(self, name, data):
self.value[name] = np.array(data)
def get(self, *args):
temp = self
for a in args:
temp = temp[a]
return temp
|
python
|
text = input("enter a string: ")
isPalindromic = text[0] == text[len(text) - 1]
i, j = 0, len(text)- 1
while i < j and isPalindromic:
isPalindromic = text[i] == text[j]
i, j = i + 1, j - 1
print(isPalindromic)
|
python
|
from kivy.app import App
from kivy.lang import Builder
from kivy.uix.boxlayout import BoxLayout
from kivy.clock import Clock
from kivy.uix.label import Label
from kivy.properties import ListProperty
from kivy.uix.behaviors import ButtonBehavior
import urllib.request
import urllib.error
import urllib.parse
import json
from widget.ConsulDetails import ConsulDetailsApp
from widget.ConsulDetailsModel import ConsulDetailsModel
from widget.ConsulDetailsModel import ConsulChecksModel
Builder.load_file("widget/template/ConsulWidget.kv")
class DetailButton(ButtonBehavior, Label):
def __init__(self, **kwargs):
super(DetailButton, self).__init__(**kwargs)
def display_details(self, name):
print("Show details for %s" % name)
details_model = self.__make_details_object(name)
popup = ConsulDetailsApp(details_model).build()
popup.open()
@staticmethod
def __make_details_object(name):
details_model = ConsulDetailsModel()
details_model.name = name
req = urllib.request.Request('http://localhost:8500/v1/health/service/%s?dc=dc1&token=' % name)
try:
response = urllib.request.urlopen(req)
data = json.loads(response.read().decode('utf-8'))
details_model.service_id = data[0]['Service']['ID']
details_model.service_name = data[0]['Service']['Service']
details_model.service_adres = data[0]['Service']['Address']
details_model.service_port = data[0]['Service']['Port']
for check in data[0]['Checks']:
checkObject = ConsulChecksModel()
checkObject.check_id = check['CheckID']
checkObject.name = check['Name']
checkObject.status = check['Status']
checkObject.output = check['Output']
checkObject.service_id = check['ServiceID']
checkObject.service_name = check['ServiceName']
checkObject.status_color(check['Status'])
details_model.checks.append(checkObject)
except urllib.error.URLError as e:
print(e.reason)
return details_model
class ConsulWidget(BoxLayout):
state = 'all'
data = ListProperty([])
rv_data = ListProperty([])
def __init__(self, **kwargs):
super(ConsulWidget, self).__init__(**kwargs)
def on_enter(self):
print('on_enter')
self.make_data_request()
self.start()
def on_leave(self):
print('on_leave')
self.stop()
def make_data_request(self):
req = urllib.request.Request('http://localhost:8500/v1/internal/ui/services?dc=dc1&token=')
try:
response = urllib.request.urlopen(req)
self.data = json.loads(response.read().decode('utf-8'))
self.test_subset()
except urllib.error.URLError as e:
print(e.reason)
def test_subset(self, state=state):
if not len(self.data):
self.make_data_request()
else:
self.state = state
            self.rv_data = [
                {
                    'name': str(item['Name']),
                    'passing': str(item['ChecksPassing']),
                    'warning': str(item['ChecksWarning']),
                    'critical': str(item['ChecksCritical']),
                    'statuscolor': self.__setColor(item),
                }
                for item in self.data if self.__match_state(state, item)
            ]
@staticmethod
def __match_state(state, item):
        if state == 'all':
            return True
        elif state == 'failing' and (item['ChecksCritical'] or item['ChecksWarning']):
            return True
        elif state == 'succes' and not item['ChecksCritical'] and not item['ChecksWarning']:
            return True
        return False
@staticmethod
def __setColor(item):
c = [0, 1, 0.3, 0.2]
if item['ChecksCritical']:
c = [1, 0, 0, 0.2]
elif item['ChecksWarning']:
c = [1, 0.6, 0, 0.2]
return c
def start(self):
print('Start data refresh timer')
Clock.schedule_interval(self.refresh_data, 5)
def stop(self):
print('Stop data refresh timer')
Clock.unschedule(self.refresh_data)
def refresh_data(self, dt):
print('refresh consul data')
self.data = []
self.make_data_request()
class Consul(App):
def build(self):
return ConsulWidget()
|
python
|
#!/bin/python3
import json
import os
import sys
import io
import time
from specimen import specimen
from growlab_v1_http_client import growlab_v1_http_client
from readingsbuilder import readingsbuilder
from pathbuilder import pathbuilder
if __name__ == "__main__":
print("Starting growlab")
config = {}
try:
with open("./config.json") as f:
config = json.loads(f.read())
except Exception as e:
sys.stderr.write("Error: {}".format(e))
sys.exit(1)
print("Loaded config, saving images to {}".format(
config["images"]["output_directory"]))
http_client = growlab_v1_http_client(config["http"])
thp_readings = http_client.get_thp_readings()
light_intensity_readings = http_client.get_light_intensity_readings()
camera_mode = http_client.get_camera_mode()
timestamp_string = time.strftime("%Y-%m-%d %H:%M:%S")
r_builder = readingsbuilder(
thp_readings, light_intensity_readings, camera_mode, timestamp_string)
readings = r_builder.build_readings_structrue()
# print(readings)
readings_pathbuilder = pathbuilder(config["data"]["output_directory"],
"." + config["data"]["encoding"], timestamp_string)
readings_filepath = readings_pathbuilder.build_file_path()
print("Readings file output path [", readings_filepath, "]")
with open(readings_filepath, 'w') as readings_output_file:
json.dump(readings, readings_output_file)
is_image_taken = False
camera_image = http_client.get_camera_image()
if len(camera_image) != 0:
is_image_taken = True
    if is_image_taken:
        frame = io.BytesIO(camera_image)  # reuse the image already fetched above
pwd = os.getcwd()
output_path = pwd + "/html"
# print("Html page content output path [", output_path, "]")
    try:
        os.mkdir(output_path)
    except FileExistsError:
        pass
spec = specimen(config["text"], config["images"])
pb = pathbuilder(config["images"]["output_directory"],
"." + config["images"]["encoding"], timestamp_string)
image_file_path = pb.build_file_path()
print("Image file output path [", image_file_path, "]")
if is_image_taken:
spec.save_image(image_file_path, frame, readings)
spec.save_html(image_file_path, output_path, readings, is_image_taken)
|
python
|
#!/usr/bin/env python
# encoding: utf-8
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
r = 0
hd = ListNode(0)
p = None
        while l1 is not None or l2 is not None or r != 0:
            if p is None:
p = hd
else:
p.next = ListNode(0)
p = p.next
a = l1.val if l1 else 0
b = l2.val if l2 else 0
m = (a + b + r) % 10
r = (a + b + r) // 10
p.val = m
l1 = l1.next if l1 else None
l2 = l2.next if l2 else None
return hd
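# Worked example: 342 + 465 = 807, with digits stored least-significant first.
#
#   l1 = ListNode(2); l1.next = ListNode(4); l1.next.next = ListNode(3)
#   l2 = ListNode(5); l2.next = ListNode(6); l2.next.next = ListNode(4)
#   node = Solution().addTwoNumbers(l1, l2)
#   # traversing `node` yields 7 -> 0 -> 8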
|
python
|
import atexit
from pathlib import Path
from typing import Dict, Union
from .template import BaseTemplate
from .exceptions import NotFoundError
class MemoryEngine(object):
_data: Dict
_path: Path
_template: BaseTemplate
def __init__(self, path: Union[Path, str], template: BaseTemplate, auto_load=True):
"""
:param path: path to save file
:param template: memory template
"""
self._data = {}
self._template = template
# update path
self.path = path
# exposing dictionary methods
self.clear = self._data.clear
self.items = self._data.items
# read the initial data
if auto_load:
self.load()
@property
def path(self):
return self._path
@path.setter
def path(self, value: Union[Path, str]):
        if not isinstance(value, Path):
            self._path = Path(value)
        else:
            self._path = value
@property
def template(self):
return self._template
def save(self):
"""
write current data to disk
"""
self._template.save(self._data, self._path)
def load(self):
"""
read data from disk
"""
try:
self._data = self._template.load(self._path)
except NotFoundError:
self.save()
def get(self, key, default=None):
"""
:param key: key used as identifier
        :param default: value to return if key is not found
        :return: data corresponding to identifier (key)
:returns: default if key not found
"""
try:
value = self._data[key]
except KeyError:
value = default
return value
def delete(self, *args):
"""
removes the keys from memory
:param args: keys to be removed
"""
for key in args:
try:
del self._data[key]
except KeyError:
pass
def put(self, key, value):
"""
adds key-value pair to memory
:param key: key used as identifier
:param value: data to store
:return: self, may be chained
"""
self._data[key] = value
return self
def putall(self, d: dict):
"""
adds all the key-value pairs in the map
:param d: dictionary map to be stored
"""
for key, value in d.items():
self._data[key] = value
def save_atexit(self, should_save=True):
"""
register save function to atexit module
:param should_save: whether to register or unregister
"""
if should_save:
atexit.register(self.save)
else:
atexit.unregister(self.save)
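# Usage sketch (assumes a concrete BaseTemplate implementation, e.g. a
# hypothetical JsonTemplate that (de)serializes the dict at the given path):
#
#   engine = MemoryEngine("store.json", JsonTemplate())
#   engine.put("token", "abc123").put("retries", 3)
#   engine.save_atexit()                 # persist on interpreter exit
#   engine.get("missing", default=None)  # -> None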
|
python
|
#!/usr/bin/python
#Libraries
import RPi.GPIO as GPIO
import time
#GPIO Mode (BOARD / BCM)
GPIO.setmode(GPIO.BCM)
#set GPIO Pins
GPIO_TRIGGER = 18
GPIO_ECHO = 24
GPIO_IR = 6
#set GPIO direction (IN / OUT)
GPIO.setup(GPIO_TRIGGER, GPIO.OUT)
GPIO.setup(GPIO_ECHO, GPIO.IN)
GPIO.setup(GPIO_IR, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
last_dist = 0
last_dist_time = 0
def main():
print("PIR Module Test (CTRL+C to exit)")
try:
GPIO.add_event_detect(GPIO_IR, GPIO.RISING, callback=MOTION)
while 1:
time.sleep(10)
except KeyboardInterrupt:
print("Quit")
GPIO.cleanup()
def MOTION(PIR_PIN):
global last_dist
global last_dist_time
dist = distance()
d_time = int(time.time())
print("%s\tOh hay I saw something" % d_time)
if dist < 2000:
print("%s\tI think I spot you at %.1f cm" % (d_time, dist))
last_dist = dist
last_dist_time = d_time
else:
if last_dist > 0:
del_time = d_time - last_dist_time
print("%s\tYa know I feel like you are hanging around and only %s seconds ago you were at %.1f cm away" % (d_time, del_time, last_dist))
else:
print("%s\tPretty Sneaky... " % d_time)
def distance():
# set Trigger to HIGH
GPIO.output(GPIO_TRIGGER, True)
# set Trigger after 0.01ms to LOW
time.sleep(0.00001)
GPIO.output(GPIO_TRIGGER, False)
StartTime = time.time()
StopTime = time.time()
# save StartTime
while GPIO.input(GPIO_ECHO) == 0:
StartTime = time.time()
# save time of arrival
while GPIO.input(GPIO_ECHO) == 1:
StopTime = time.time()
# time difference between start and arrival
TimeElapsed = StopTime - StartTime
# multiply with the sonic speed (34300 cm/s)
# and divide by 2, because there and back
distance = (TimeElapsed * 34300) / 2
return distance
if __name__ == '__main__':
main()
|
python
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from projects.tokens import token_generator
from rest_framework import permissions
class HasAPIAccess(permissions.BasePermission):
""" """
message = _('Invalid or missing API Key.')
def has_permission(self, request, view):
api_token = request.META.get('HTTP_API_KEY', None)
return bool(api_token and token_generator.check_token(api_token))
|
python
|
# Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import pytest
TFS_DOCKER_BASE_NAME = 'sagemaker-tensorflow-serving'
def pytest_addoption(parser):
parser.addoption('--docker-base-name', default=TFS_DOCKER_BASE_NAME)
parser.addoption('--framework-version', required=True)
parser.addoption('--processor', default='cpu', choices=['cpu', 'gpu'])
parser.addoption('--aws-id', default=None)
parser.addoption('--tag')
parser.addoption('--generate-coverage-doc', default=False, action='store_true',
help='use this option to generate test coverage doc')
def pytest_collection_modifyitems(session, config, items):
if config.getoption("--generate-coverage-doc"):
from test.test_utils.test_reporting import TestReportGenerator
report_generator = TestReportGenerator(items, is_sagemaker=True)
report_generator.generate_coverage_doc(framework="tensorflow", job_type="inference")
@pytest.fixture(scope='module')
def docker_base_name(request):
return request.config.getoption('--docker-base-name')
@pytest.fixture(scope='module')
def framework_version(request):
return request.config.getoption('--framework-version')
@pytest.fixture(scope='module')
def processor(request):
return request.config.getoption('--processor')
@pytest.fixture(scope='module')
def runtime_config(request, processor):
if processor == 'gpu':
return '--runtime=nvidia '
else:
return ''
@pytest.fixture(scope='module')
def tag(request, framework_version, processor):
image_tag = request.config.getoption('--tag')
if not image_tag:
image_tag = '{}-{}'.format(framework_version, processor)
return image_tag
@pytest.fixture(autouse=True)
def skip_by_device_type(request, processor):
is_gpu = processor == 'gpu'
if (request.node.get_closest_marker('skip_gpu') and is_gpu) or \
(request.node.get_closest_marker('skip_cpu') and not is_gpu):
pytest.skip('Skipping because running on \'{}\' instance'.format(processor))
|
python
|
#!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# A script to test the mask filter.
# replaces a circle with a color
# Image pipeline
reader = vtk.vtkPNMReader()
reader.ReleaseDataFlagOff()
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/earth.ppm")
reader.Update()
sphere = vtk.vtkImageEllipsoidSource()
sphere.SetWholeExtent(0,511,0,255,0,0)
sphere.SetCenter(128,128,0)
sphere.SetRadius(80,80,1)
sphere.Update()
mask = vtk.vtkImageMask()
mask.SetImageInputData(reader.GetOutput())
mask.SetMaskInputData(sphere.GetOutput())
mask.SetMaskedOutputValue(100,128,200)
mask.NotMaskOn()
mask.ReleaseDataFlagOff()
mask.Update()
sphere2 = vtk.vtkImageEllipsoidSource()
sphere2.SetWholeExtent(0,511,0,255,0,0)
sphere2.SetCenter(328,128,0)
sphere2.SetRadius(80,50,1)
sphere2.Update()
# Test the wrapping of the output masked value
mask2 = vtk.vtkImageMask()
mask2.SetImageInputData(mask.GetOutput())
mask2.SetMaskInputData(sphere2.GetOutput())
mask2.SetMaskedOutputValue(100)
mask2.NotMaskOn()
mask2.ReleaseDataFlagOff()
mask2.Update()
sphere3 = vtk.vtkImageEllipsoidSource()
sphere3.SetWholeExtent(0,511,0,255,0,0)
sphere3.SetCenter(228,155,0)
sphere3.SetRadius(80,80,1)
sphere3.Update()
# Test the wrapping of the output masked value
mask3 = vtk.vtkImageMask()
mask3.SetImageInputData(mask2.GetOutput())
mask3.SetMaskInputData(sphere3.GetOutput())
mask3.SetMaskedOutputValue(255)
mask3.NotMaskOn()
mask3.SetMaskAlpha(0.5)
mask3.ReleaseDataFlagOff()
viewer = vtk.vtkImageViewer()
viewer.SetInputConnection(mask3.GetOutputPort())
viewer.SetColorWindow(255)
viewer.SetColorLevel(128)
#viewer DebugOn
viewer.Render()
# --- end of script --
|
python
|
"""
Testing area package
"""
from shapes.square.area import area_square
from shapes.square.perimeter import perimeter_square
import pytest
def test_square_area():
"""
testing function area_square
"""
length = 2
A = area_square(length)
assert pytest.approx(A) == 4.0
def test_square_perimeter():
length = 2
P = perimeter_square(length)
assert pytest.approx(P) == 8.0
######################
# Write a test for the triangle function
######################
def test_triangle_area():
print("insert test for triangle area here")
|
python
|
from dataset import FontData, make_tfrecodes
gspath='gs://your-bucket-name/'
def make1():
d = FontData()
make_tfrecodes(d, gspath, 512, 64, 8, train=True)
make_tfrecodes(d, gspath, 512, 64, 8, train=False)
if __name__ == "__main__":
make1()
|
python
|
import zipfile
import pytest
from git_taxbreak.modules.writer import Writer
@pytest.fixture
def patch_zip_file(monkeypatch):
class ZipFileMock(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
self.output = args[0]
self.content = []
def __enter__(self):
return self
def close(self):
self.output.content = self.content
def writestr(self, *args, **kwargs):
file = args[0]
content = args[1]
self.content.append({"file_name": file, "content": content})
monkeypatch.setattr(zipfile, "ZipFile", ZipFileMock)
class DummyOutput:
content = None
def test_archive_save(patch_zip_file):
ARTIFACTS = [
{
"diff": "diff_content1",
"message": "summary1\n\nMore text1",
"commit_hash": "hash1",
"files": [
{"file_name": "some_path/file_name1.txt", "content": "file_content1"},
{"file_name": "some_path2/file_name2.txt", "content": "file_content2"},
{"file_name": "file_name3.txt", "content": "file_content3"},
],
},
{
"diff": "diff_content2",
"message": "summary2\n\nMore text2",
"commit_hash": "hash2",
"files": [
{"file_name": "some_path/file_name4.txt", "content": "file_content4"},
{"file_name": "file_name5.txt", "content": "file_content5"},
],
},
]
EXPECTED_CONTENT = [
{"file_name": "hash1/diff.txt", "content": "diff_content1"},
{"file_name": "hash1/some_path/file_name1.txt", "content": "file_content1"},
{"file_name": "hash1/some_path2/file_name2.txt", "content": "file_content2"},
{"file_name": "hash1/file_name3.txt", "content": "file_content3"},
{"file_name": "hash2/diff.txt", "content": "diff_content2"},
{"file_name": "hash2/some_path/file_name4.txt", "content": "file_content4"},
{"file_name": "hash2/file_name5.txt", "content": "file_content5"},
{"file_name": "work-commits.txt", "content": "hash1 summary1\nhash2 summary2"},
]
dummy_output = DummyOutput()
with Writer(dummy_output) as writer:
writer.archive(ARTIFACTS)
assert dummy_output.content == EXPECTED_CONTENT
def test_archive_not_throw_when_file_content_not_exist(patch_zip_file):
ARTIFACTS = [
{
"diff": "diff_content1",
"message": "summary1\n\nMore text1",
"commit_hash": "hash1",
"files": [{"file_name": "some_path/file_name1.txt", "content": None}],
}
]
EXPECTED_CONTENT = [
{"file_name": "hash1/diff.txt", "content": "diff_content1"},
{"file_name": "work-commits.txt", "content": "hash1 summary1"},
]
dummy_output = DummyOutput()
with Writer(dummy_output) as writer:
writer.archive(ARTIFACTS)
assert dummy_output.content == EXPECTED_CONTENT
|
python
|
import sys
import shutil
from pathlib import Path
import logging
from pcv import DEFAULTS_PATH, CALLER_PATH, SOURCE, STATIC, DIST
"""
Initializes a pcv project in the current folder.
Run from command line:
python -m pcv.start
This will create the following directory tree in the current folder:
.
├── settings.py
├── makecv.py
├── dist (empty directory)
└── source
├── static (empty directory)
└── template.json
"""
logger = logging.getLogger(__name__)
def initialize(destination=None):
""" copy defaults tree to specified destination and create empty folders """
if destination is None:
destination = CALLER_PATH
else:
destination = Path(destination)
path = shutil.copytree(DEFAULTS_PATH, destination, dirs_exist_ok=True)
destination.joinpath(SOURCE).joinpath(STATIC).mkdir(exist_ok=True)
destination.joinpath(DIST).mkdir(exist_ok=True)
logger.info(f'pcv project initialized in {path}')
if __name__ == '__main__':
logger.info(f'script path: {sys.argv.pop(0)}')
initialize(*sys.argv)
|
python
|
import numpy as np
import PatternToNumber
def computing_frequencies(text, k):
m = 4**k
frequency_array = np.zeros(shape = (1, m), dtype = int)
for i in range(len(text) - k + 1):
pattern = text[i:i+k]
j = PatternToNumber.pattern_to_number(pattern)
frequency_array[0 , j] = frequency_array[0, j] + 1
return frequency_array
'''def results(text, t):
result = computing_frequencies(text, t).tolist()
for line in result:
print(' '.join(map(str, line)))
results('GAGCGGGACACGTATTATAACGAAGAAAACGGGAGACTAAACTGTAGAGGACTCTTGCCAGCATACGTAACAAGCTCGACGCAGCGCGTAGTCTGATCCGAGTGAATCCGATCCCAGCAAAGGTCGTGGTTGTGTTCGGGGGGGCCTCGCGGGGGCTGCGGACTACCCTCTTGCGCCTAAGCATAAGCTCAGGAACCTGTTTCTTATGCAATTCTGTTAAGACCTTCCAAACGCATAAGGGCTCTGATTGCTAGTATGATAGAACAATCCGCACCCACCAGCTAATAGGAAGCCAACATAGCGGAGGGACACGGCTGCCTAGCCAGGACAACATTCGAGTATGATCAGAGTGCACCGGAATAGGATACTCGTGGTCGCAACGTCGCACGGCTTTCCTAAGCTGTGGAAAAGATTCAACGGACTCAGCGCCAGCGCCCGGGGACCACCGTATCTATGGGGAGTCAGTGAGAGCAGAGCAGTTGGGGGAAAAACAGCGATCCTGGCGGACACCGAGAGTTCGCATTGTATATGAAGTAGCAGCGAGTAGAGTAGATTACTGATCGAATGGTCGCGTGATCCCTGGCAAATCCTACAAAATAGGAGGAGCTGAGGCGCTCGGCCCCGATCCTGGTTTTTCTGACCCGCA', 5)
'''
|
python
|
# Copyright(C) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Dict, List
import numpy as np
import xarray as xr
from .base_jags_impl import BaseJagsImplementation
class RobustRegression(BaseJagsImplementation):
def __init__(self, **attrs: Dict) -> None:
self.attrs = attrs
def get_vars(self) -> List[str]:
return ["alpha", "beta", "nu", "sigma"]
def format_data_to_jags(self, data: xr.Dataset) -> Dict:
# transpose the dataset to ensure that it is the way we expect
data = data.transpose("item", "feature")
# we already have all the values to be bound except for X and Y in self.attrs
attrs: dict = self.attrs.copy()
attrs["X"] = data.X.values
attrs["Y"] = data.Y.values
return attrs
def extract_data_from_jags(self, samples: Dict) -> xr.Dataset:
# dim 2 is the chains dimension so we squeeze it out
return xr.Dataset(
{
# alpha, nu, sigma dimensions are [1, samples], we want [samples]
"alpha": (["draw"], samples["alpha"].squeeze(0)),
"nu": (["draw"], samples["nu"].squeeze(0)),
"sigma": (["draw"], samples["sigma"].squeeze(0)),
# beta dimensions are [k, samples], we want [samples, k]
"beta": (["draw", "feature"], samples["beta"].T),
},
coords={
"draw": np.arange(samples["beta"].shape[1]),
"feature": np.arange(samples["beta"].shape[0]),
},
)
def get_code(self) -> str:
return """
model {
# priors
# note: JAGS normal distribution uses precision rather than standard deviation
alpha ~ dnorm(0.0, 1/(alpha_scale**2));
for (j in 1:k) {
beta[j] ~ dnorm(beta_loc, 1/(beta_scale**2));
}
# gamma and exponential are parameterized by shape, rate (=1/scale)
nu ~ dgamma(2, 0.1)
sigma ~ dexp(1/sigma_mean)
# likelihood
for (i in 1:n) {
mu[i] <- alpha + inprod(beta, X[i,])
Y[i] ~ dt(mu[i], 1/(sigma**2), nu)
}
}
"""
|
python
|
import asyncio
import discord
import os
from keep_alive import keep_alive
client = discord.Client()
@client.event
async def on_ready():
print('We have logged in as {0.user}'.format(client))
@client.event
async def on_message(message):
if message.author == client.user:
return
if str(message.author) in ["robbbot#6138"]:
await message.add_reaction('🟪')
if str(message.author) in ["barftron666#9511"]:
await message.add_reaction('👐')
if str(message.author) in ["SinSemilla#3965"]:
await message.add_reaction('<:NH:789552992876494888>')
if str(message.author) in ["Desert Ham#2846"]:
await message.add_reaction('<:DS:784463585531265104>')
if message.content.startswith('$hello'):
await message.channel.send('Hello!')
if message.content.startswith('@nickhalford'):
await message.channel.send('SinSemilla#3965')
if message.content.startswith('$darcysux') or message.content.startswith(
'$ds'):
await message.channel.send('<:DS:784463585531265104>')
if message.content.startswith('$thumb'):
channel = message.channel
await channel.send('Send me that 👍')
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '👍'
try:
reaction, user = await client.wait_for(
'reaction_add', timeout=60.0, check=check)
except asyncio.TimeoutError:
await channel.send('👎')
else:
await channel.send('👍')
if message.content.startswith('$commands'):
channel = message.channel
await channel.send('$hello, $ds, $darcysux, $commands')
keep_alive()
client.run(os.getenv('TOKEN'))
|
python
|
""" Command line utility for repository """
import fire
from repo_utils import get_repository_path
import os
import importlib
# This will make available all definitions found under the same package that this file is found.
# This allows making a command line out of any package with the repo_utils template by putting start_command_line inside it.
package_name = __file__.replace(get_repository_path(), '').split(os.sep)[1]
mod = importlib.import_module(package_name)
def start_command_line():
"""
Command-line interface for the repository.
Specify the definition to execute and then any arguments.
e.g. "define <name>".
The Fire library converts the specified function or object into a command-line utility.
"""
    fire.Fire(mod)
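# Usage sketch: with this module importable as <package>.command_line, Fire
# exposes every public definition of the package as a subcommand, e.g.
#
#   python -m <package>.command_line <definition_name> --arg=value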
|
python
|
from random import choice
from typing import Union
from datetime import datetime
from discord import User, Member, Embed
from discord.ext import commands
from bot.main import NewCommand
class Hug(commands.Cog):
def __init__(self, client):
self.client = client
def randomise(self, users:list):
messages = [
f'{users[0]} hugs {users[1]}!',
f'{users[0]} gives {users[1]} a big hug!',
f'{users[0]} gives a huge hug to {users[1]}!',
]
gifs = [
'https://media.giphy.com/media/PHZ7v9tfQu0o0/giphy.gif',
'https://media.giphy.com/media/od5H3PmEG5EVq/giphy.gif',
'https://media.giphy.com/media/GMFUrC8E8aWoo/giphy.gif',
'https://media.giphy.com/media/svXXBgduBsJ1u/giphy.gif',
'https://media.giphy.com/media/QFPoctlgZ5s0E/giphy.gif',
'https://media.giphy.com/media/3bqtLDeiDtwhq/giphy.gif',
'https://media.giphy.com/media/sUIZWMnfd4Mb6/giphy.gif',
'https://media.giphy.com/media/lrr9rHuoJOE0w/giphy.gif',
'https://media.giphy.com/media/du8yT5dStTeMg/giphy.gif',
'https://media.giphy.com/media/l2QDM9Jnim1YVILXa/giphy.gif',
'https://media.giphy.com/media/DjczAlIcyK1Co/giphy.gif',
'https://media.giphy.com/media/2A75Y6NodD38I/giphy.gif',
'https://media.giphy.com/media/10BcGXjbHOctZC/giphy.gif',
]
return (choice(gifs), choice(messages))
@commands.command(
name='hug',
cls=NewCommand,
brief='A Hug for you!',
description='Give someone a Tight Hug!',
usage='<user:name/id/@mention>',
explained_usage=["**User:** The User whom you wanna give a Hug!"],
examples=[
'hug @Akshu',
'hug 764462046032560128',
'hug Akshu#7472'
]
)
@commands.guild_only()
@commands.bot_has_permissions(send_messages=True, embed_links=True)
@commands.cooldown(1, 5, commands.BucketType.member)
async def _hug(self, ctx, user:Union[User, Member]):
gif, msg = self.randomise([ctx.author.name, user.name])
embed=Embed(color=0x00eeff, timestamp=datetime.utcnow())
embed.set_author(name=msg, icon_url=ctx.author.avatar_url)
embed.set_footer(text=f'Thanks for using {ctx.guild.me.name}', icon_url=ctx.guild.me.avatar_url)
embed.set_image(url=gif)
await ctx.reply(embed=embed)
def setup(client):
client.add_cog(Hug(client))
|
python
|
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from transductor.models import EnergyTransductor, TransductorModel
class EnergyTransductorViewsTestCase(TestCase):
def setUp(self):
t_model = TransductorModel()
t_model.name = "TR 4020"
t_model.transport_protocol = "UDP"
t_model.serial_protocol = "Mosbus RTU"
t_model.measurements_type = "EnergyMeasurements"
t_model.register_addresses = [[68, 0], [70, 1]]
t_model.save()
self.t_model = t_model
transductor = self.create_energy_transductor(1, "Test Transductor", "1.1.1.1", t_model)
self.transductor = transductor
def test_index_access_and_template(self):
url = reverse('transductor:index')
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertTemplateUsed(response, 'transductor/index.html')
def test_index_with_transductor(self):
transductor = self.transductor
url = reverse('transductor:index')
response = self.client.get(url)
self.assertIn(transductor.description, response.content)
def test_transductor_creation_page(self):
url = reverse('transductor:new')
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_not_create_energy_transductor_without_params(self):
url = reverse('transductor:new')
params = {
'serie_number': u'',
'ip_address': u'',
'description': u'',
'model': u''
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'serie_number', 'This field is required.')
self.assertFormError(response, 'form', 'ip_address', 'This field is required.')
self.assertFormError(response, 'form', 'description', 'This field is required.')
self.assertFormError(response, 'form', 'model', 'This field is required.')
def test_create_valid_energy_transductor(self):
t_model = self.t_model
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': '111.111.111.111',
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertEqual(transductor_count + 1, EnergyTransductor.objects.count())
t_id = EnergyTransductor.objects.get(ip_address='111.111.111.111').id
detail_url = reverse('transductor:detail', kwargs={'transductor_id': t_id})
self.assertRedirects(response, detail_url)
def test_not_create_transductor_with_same_ip_address(self):
t_model = self.t_model
transductor = self.transductor
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': transductor.ip_address,
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Transductor with this Ip address already exists.')
def test_not_create_transductor_with_wrong_ip_address(self):
t_model = self.t_model
url = reverse('transductor:new')
params = {
'serie_number': 1,
'ip_address': '1',
'description': 'Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Incorrect IP address format')
def test_energy_transductor_detail(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
url = reverse('transductor:detail', kwargs={'transductor_id': transductor.id})
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertIn("No measurement avaiable", response.content)
def test_transductor_editing_page(self):
transductor = self.transductor
url = reverse('transductor:edit', kwargs={'transductor_id': transductor.id})
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_change_transductor_model(self):
t_model_1 = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model_1)
t_model_2 = TransductorModel()
t_model_2.name = "Transductor Model 2"
t_model_2.transport_protocol = "TCP/IP"
t_model_2.serial_protocol = "Mosbus"
t_model_2.register_addresses = [[100, 0], [105, 1]]
t_model_2.save()
url = reverse('transductor:edit', kwargs={'transductor_id': transductor.id})
params = {
'serie_number': 2,
'ip_address': '222.222.222.222',
'description': 'Another Test',
'model': t_model_2.id
}
self.client.post(url, params)
transductor = EnergyTransductor.objects.get(ip_address='222.222.222.222')
self.assertEqual(2, transductor.serie_number)
self.assertEqual("Another Test", transductor.description)
self.assertEqual(t_model_2, transductor.model)
def test_not_edit_transductor_with_wrong_params(self):
t_model = self.t_model
url = reverse('transductor:edit', kwargs={'transductor_id': self.transductor.id})
params = {
'serie_number': 2,
'ip_address': 'Wrong Ip Addres',
'description': 'Another Test',
'model': t_model.id
}
response = self.client.post(url, params)
self.assertFormError(response, 'form', 'ip_address', 'Incorrect IP address format')
def test_delete_energy_transductor(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:delete', kwargs={'transductor_id': transductor.id})
params = {
'delete': u''
}
self.client.post(url, params)
self.assertEqual(transductor_count - 1, EnergyTransductor.objects.count())
def test_not_delete_energy_transductor_with_get_method(self):
t_model = self.t_model
transductor = self.create_energy_transductor(1, "Test", "111.111.111.111", t_model)
transductor_count = EnergyTransductor.objects.count()
url = reverse('transductor:delete', kwargs={'transductor_id': transductor.id})
self.client.get(url)
self.assertEqual(transductor_count, EnergyTransductor.objects.count())
def test_transductor_model_index(self):
t_model = self.t_model
url = reverse('transductor:model_index')
response = self.client.get(url)
self.assertIn(t_model.name, response.content)
def create_energy_transductor(self, serie_number, description, ip_address, t_model):
transductor = EnergyTransductor()
transductor.serie_number = serie_number
transductor.description = description
transductor.creation_date = timezone.now()
transductor.ip_address = ip_address
transductor.model = t_model
transductor.save()
return transductor
|
python
|
'''
Created on March 30, 2018
@author: Alejandro Molina
'''
import numpy as np
from spn.algorithms.StructureLearning import get_next_operation, learn_structure
from spn.algorithms.Validity import is_valid
from spn.algorithms.splitting.Clustering import get_split_rows_KMeans, get_split_rows_TSNE
from spn.algorithms.splitting.RDC import get_split_cols_RDC_py, get_split_rows_RDC_py
from spn.structure.Base import Sum, assign_ids
from spn.structure.leaves.histogram.Histograms import create_histogram_leaf
from spn.structure.leaves.parametric.Parametric import create_parametric_leaf
from spn.structure.leaves.piecewise.PiecewiseLinear import create_piecewise_leaf
def learn_classifier(data, ds_context, spn_learn_wrapper, label_idx, cpus=-1, rand_gen=None):
spn = Sum()
for label, count in zip(*np.unique(data[:, label_idx], return_counts=True)):
branch = spn_learn_wrapper(data[data[:, label_idx] == label, :], ds_context, cpus=cpus, rand_gen=rand_gen)
spn.children.append(branch)
spn.weights.append(count / data.shape[0])
spn.scope.extend(branch.scope)
assign_ids(spn)
valid, err = is_valid(spn)
assert valid, "invalid spn: " + err
return spn
def learn_mspn_with_missing(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3,
linear=False, ohe=False, leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
# leaves = create_histogram_leaf
leaves = create_piecewise_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
        # `leaves` is resolved in the enclosing scope above; re-assigning it here
        # would make it local to this function and raise UnboundLocalError.
        nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn_missing = memory.cache(l_mspn_missing)
return l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe)
def learn_mspn(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3, ohe=False,
leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
leaves = create_histogram_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn = memory.cache(l_mspn)
return l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe)
def learn_parametric(data, ds_context, cols="rdc", rows="kmeans", min_instances_slice=200, threshold=0.3, ohe=False,
leaves=None, memory=None, rand_gen=None, cpus=-1):
if leaves is None:
leaves = create_parametric_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=cpus)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
learn_param = memory.cache(learn_param)
return learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe)
|
python
|
__version__ = "0.0.1"
from ._widget import LiveIDS
from .video_ui import initui
|
python
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('eventlog', '0037_auto_20180911_1252'),
]
operations = [
migrations.RemoveField(
model_name='celerytaskprogress',
name='content_type',
),
migrations.RemoveField(
model_name='celerytaskprogress',
name='object_id',
),
migrations.AlterField(
model_name='celerytaskprogress',
name='task_type',
field=models.IntegerField(default=0, choices=[(0, 'Bulk Site Update'), (1, 'User Assign to Project'), (2, 'User Assign to Site'), (3, 'Site Response Xls Report'), (4, 'Site Import'), (6, 'Zip Site Images'), (7, 'Remove Roles'), (8, 'Site Data Export'), (9, 'Response Pdf Report'), (10, 'Site Progress Xls Report')]),
),
]
|
python
|
import pytest
from kaneda.backends import BaseBackend
class DummyBackend(BaseBackend):
reported_data = {}
def report(self, name, metric, value, tags, id_=None):
payload = self._get_payload(name, value, tags)
payload['metric'] = metric
self.reported_data[name] = payload
@pytest.fixture
def dummy_backend():
return DummyBackend()
@pytest.fixture
def empty_settings():
class Settings:
pass
return Settings
@pytest.fixture
def unexisting_backend_settings():
class Settings:
BACKEND = 'kaneda.backends.UnexsitingBackend'
return Settings
|
python
|
#!/usr/bin/env python
"""
Copy one netCDF file to another with compression and sensible
chunking
Adapted from nc3tonc4
https://github.com/Unidata/netcdf4-python/blob/master/utils/nc3tonc4
"""
from netCDF4 import Dataset
import numpy as np
import numpy.ma as ma
import os
import sys
import math
import operator
from warnings import warn
import argparse
import copy
import numbers
from six.moves import reduce
dtypes = {
'f' : 4, # f4, 32-bit floating point
'd' : 8, # f8, 64-bit floating point
    'e' : 2, # f2, 16-bit floating point
    'i' : 4, # i4, 32-bit signed integer
    'h' : 2, # i2, 16-bit signed integer
    'l' : 8, # i8, 64-bit signed integer
'b' : 1, # i1, 8-bit signed integer
'B' : 1, # u1, 8-bit unsigned integer
'H' : 2, # u2, 16-bit unsigned integer
'I' : 4, # u4, 32-bit unsigned integer
'L' : 8, # u8, 64-bit unsigned integer
'S' : 1 } # S1, single-character string
class FormatError(Exception):
'''Unsupported netCDF format'''
def numVals(shape):
"""Return number of values in chunk of specified shape, given by a list of dimension lengths.
shape -- list of variable dimension sizes"""
if(len(shape) == 0):
return 1
return reduce(operator.mul, shape)
def cascadeRounding(array):
"""Implement cascase rounding
http://stackoverflow.com/questions/792460/how-to-round-floats-to-integers-while-preserving-their-sum
"""
sort_index = np.argsort(array)
integer_array = []
total_float = 0
total_int = 0
# We place a hard limit on the total of the array, which keeps
# the rounded values from exceeding the total of the array
limit = np.floor(sum(array))
for idx in sort_index:
total_float += array[idx]
integer_array.append(min(round(total_float),limit)-total_int)
total_int += integer_array[-1]
rounded_array = np.zeros(len(array))
# Should make this a comprehension, but I couldn't comprehend it
for i in range(len(sort_index)):
rounded_array[sort_index[i]] = integer_array[i]
return rounded_array
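# Example of the invariant: the rounded values never sum to more than the
# floor of the input total, e.g. cascadeRounding(np.array([1.5, 1.5]))
# returns array([2., 1.]) -- the sum is preserved at 3.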
def calcChunkShape(chunkVol, varShape):
"""
Calculate a chunk shape for a given volume/area for the dimensions in varShape.
    chunkVol -- volume/area of the chunk
    varShape -- array of dimensions for the whole dataset
"""
return np.array(cascadeRounding(np.asarray(varShape) * (chunkVol / float(numVals(varShape))) ** (1./len(varShape))),dtype="int")
def chunk_shape_nD(varShape, valSize=4, chunkSize=4096, minDim=1):
"""
Return a 'good shape' for an nD variable, assuming balanced 1D, 2D access
varShape -- list of variable dimension sizes
chunkSize -- minimum chunksize desired, in bytes (default 4096)
valSize -- size of each data value, in bytes (default 4)
    minDim -- minimum chunk dimension (used if the var dimension is larger
    than this value, otherwise it is just the var dimension)
Returns integer chunk lengths of a chunk shape that provides
balanced access of 1D subsets and 2D subsets of a netCDF or HDF5
variable var. 'Good shape' for chunks means that the number of
chunks accessed to read any kind of 1D or 2D subset is approximately
equal, and the size of each chunk (uncompressed) is at least
chunkSize, which is often a disk block size.
"""
varShapema = ma.array(varShape)
chunkVals = min(chunkSize / float(valSize),numVals(varShapema)) # ideal number of values in a chunk
# Make an ideal chunk shape array
chunkShape = ma.array(calcChunkShape(chunkVals,varShapema),dtype=int)
    # Short circuit for 1D arrays. Logic below is unnecessary & can divide by zero
if len(varShapema) == 1: return chunkShape.filled(fill_value=1)
# And a copy where we'll store our final values
chunkShapeFinal = ma.masked_all(chunkShape.shape,dtype=int)
if chunkVals < numVals(np.minimum(varShapema,minDim)):
while chunkVals < numVals(np.minimum(varShapema,minDim)):
minDim -= 1
sys.stderr.write('Mindim too large for variable, reduced to : %d\n' % minDim)
lastChunkCount = -1
while True:
# Loop over the axes in chunkShape, making sure they are at
# least minDim in length.
for i in range(len(chunkShape)):
if ma.is_masked(chunkShape[i]):
continue
if (chunkShape[i] < minDim):
# Set the final chunk shape for this dimension
chunkShapeFinal[i] = min(minDim,varShapema[i])
# mask it out of the array of possible chunkShapes
chunkShape[i] = ma.masked
# Have we fixed any dimensions and filled them in chunkShapeFinal?
if chunkShapeFinal.count() > 0:
chunkCount = numVals(chunkShapeFinal[~chunkShapeFinal.mask])
else:
if (lastChunkCount == -1):
# Haven't modified initial guess, break out of
# this loop and accept chunkShape
break
if chunkCount != lastChunkCount and len(varShapema[~chunkShape.mask]) > 0:
# Recalculate chunkShape array, with reduced dimensions
chunkShape[~chunkShape.mask] = calcChunkShape(chunkVals/chunkCount,varShapema[~chunkShape.mask])
lastChunkCount = chunkCount
else:
break
# This doesn't work when chunkShape has no masked values. Weird.
# chunkShapeFinal[chunkShapeFinal.mask] = chunkShape[~chunkShape.mask]
for i in range(len(chunkShapeFinal)):
if ma.is_masked(chunkShapeFinal[i]):
chunkShapeFinal[i] = chunkShape[i]
return chunkShapeFinal.filled(fill_value=1)
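# Illustrative call (exact output depends on the rounding above): for a
# float32 variable shaped (time, lat, lon), the chunk volume targets roughly
# chunkSize/valSize = 4096/4 = 1024 values, scaled evenly across the axes:
#
#   chunk_shape_nD((3650, 180, 360), valSize=4, chunkSize=4096)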
def nc2nc(filename_o, filename_d, zlib=True, complevel=5, shuffle=True, fletcher32=False,
clobber=False, verbose=False, classic=True, lsd_dict=None, vars=None, chunksize=4, buffersize=50, mindim=1,ignoreformat=False):
"""convert a netcdf file (filename_o) to another netcdf file (filename_d)
The default format is 'NETCDF4_classic', but can be set to NETCDF4 if classic=False.
If the lsd_dict is not None, variable names corresponding to the keys of the dict
will be truncated to the decimal place specified by the values of the dict.
    This improves compression by making it 'lossy'.
If vars is not None, only variable names in the list will be copied (plus all the
dimension variables). The zlib, complevel and shuffle keywords control
how the compression is done. buffersize is the size (in KB) of the buffer used to
copy the data from one file to another. mindim sets a minimum size for a dimension
of a chunk. In some cases very large variable dimensions will mean chunk sizes for
the smaller dimensions will be small, with a minimum of at least 1. This can lead to
slow access times.
"""
if os.path.isfile(filename_d) and not clobber:
sys.stderr.write('Output file already exists: %s. Use -o option to overwrite\n' % filename_d)
return False
ncfile_o = Dataset(filename_o,'r')
    if ncfile_o.file_format == "NETCDF4":
if ignoreformat:
warn('netCDF4 formatted file .. ignoring')
else:
raise FormatError('nc2nc is not tested to work with netCDF4 files, only netCDF4 Classic, and netCDF3. See --ignoreformat option to ignore warning')
if classic:
ncfile_d = Dataset(filename_d,'w',clobber=clobber,format='NETCDF4_CLASSIC')
else:
ncfile_d = Dataset(filename_d,'w',clobber=clobber,format='NETCDF4')
mval = 1.e30 # missing value if unpackshort=True
# Copy buffer specified in MiB, so convert to bytes
buffersize = buffersize*(1024**2)
# Chunk size specified in KiB, so convert to bytes
chunksize = chunksize*1024
# create dimensions. Check for unlimited dim.
unlimdimname = False
unlimdim = None
# create global attributes.
if verbose: sys.stdout.write('copying global attributes ..\n')
#for attname in ncfile_o.ncattrs():
# setattr(ncfile_d,attname,getattr(ncfile_o,attname))
ncfile_d.setncatts(ncfile_o.__dict__)
# Copy dimensions
if verbose: sys.stdout.write('copying dimensions ..\n')
for dimname,dim in ncfile_o.dimensions.items():
if dim.isunlimited():
unlimdimname = dimname
unlimdim = dim
ncfile_d.createDimension(dimname,None)
else:
ncfile_d.createDimension(dimname,len(dim))
# create variables.
if vars is None:
varnames = ncfile_o.variables.keys()
else:
# variables to copy specified
varnames = vars
# add dimension variables
for dimname in ncfile_o.dimensions.keys():
if dimname in ncfile_o.variables.keys() and dimname not in varnames:
varnames.append(dimname)
for varname in varnames:
ncvar = ncfile_o.variables[varname]
if verbose: sys.stdout.write('copying variable %s\n' % varname)
# quantize data?
if lsd_dict is not None and varname in lsd_dict:
lsd = int(lsd_dict[varname])
if verbose: sys.stdout.write('truncating to least_significant_digit = %d\n'%lsd)
else:
lsd = None # no quantization.
datatype = ncvar.dtype
# is there an unlimited dimension?
if unlimdimname and unlimdimname in ncvar.dimensions:
hasunlimdim = True
else:
hasunlimdim = False
if hasattr(ncvar, '_FillValue'):
FillValue = ncvar._FillValue
else:
FillValue = None
chunksizes = None
# check we have a mapping from the type to a number of bytes
if ncvar.dtype.char in dtypes:
if verbose: sys.stdout.write('Variable shape: %s\n' % str(ncvar.shape))
if (ncvar.shape != ()): chunksizes=chunk_shape_nD(ncvar.shape,valSize=dtypes[ncvar.dtype.char],minDim=mindim,chunkSize=chunksize)
if verbose: sys.stdout.write('Chunk sizes: %s\n' % str(chunksizes))
else:
sys.stderr.write("This datatype not supported: dtype : %s\n" % ncvar.dtype.char)
sys.exit(1)
# Create the variable we will copy to
var = ncfile_d.createVariable(varname, datatype, ncvar.dimensions, fill_value=FillValue, least_significant_digit=lsd, zlib=zlib, complevel=complevel, shuffle=shuffle, fletcher32=fletcher32, chunksizes=chunksizes)
# fill variable attributes.
attdict = ncvar.__dict__
if '_FillValue' in attdict: del attdict['_FillValue']
var.setncatts(attdict)
# fill variable with data.
dimlim = np.asarray(ncvar.shape)
# bufferChunk is a multiple of the chunksize which is less than the size of copy buffer
if (ncvar.shape != ()): bufferChunk = chunk_shape_nD(ncvar.shape,valSize=dtypes[ncvar.dtype.char],chunkSize=buffersize)
# Don't bother copying in steps if all our data fits inside the bufferChunk
if ncvar.shape == () or np.all(bufferChunk >= dimlim):
var[:] = ncvar[:]
else:
# Make sure our chunk size is no larger than the dimension in that direction
for ind, chunk in enumerate(bufferChunk):
if chunk > dimlim[ind]: bufferChunk[ind] = dimlim[ind]
if verbose: sys.stdout.write('Buffer chunk : %s\n' % str(bufferChunk))
# bufferSteps is the number of copies of bufferChunk that fit along each axis
bufferSteps = (dimlim-1)//bufferChunk + 1
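            # ceil division: e.g. dimlim=(10,), bufferChunk=(4,) gives bufferSteps=(3,)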
# Make an iterator out of all possible combinations of the bufferOffsets, which
# are just steps along each dimension
for index in np.ndindex(*bufferSteps):
index *= bufferChunk
slices = []
# Make up slices of size bufferChunk
for start, step, end in zip(index, bufferChunk, dimlim):
# min checks we don't go beyond the limits of the variable
slices.append(slice(start,min(start+step,end),None))
# Copy the data
                var[tuple(slices)] = ncvar[tuple(slices)]
ncfile_d.sync() # flush data to disk
# close files.
ncfile_o.close()
ncfile_d.close()
def parse_args(arglist):
"""
Parse arguments given as list (arglist)
"""
class DictAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
try:
k, v = values.split("=", 1)
except ValueError:
raise argparse.ArgumentError(self, "Format must be key=value")
            # Adapted from argparse._AppendAction; avoids the private
            # argparse._ensure_value helper, which was removed in Python 3.8
            items = copy.copy(getattr(namespace, self.dest, None) or {})  # Default mutables, use copy!
try:
items[k] = int(v)
except ValueError:
raise argparse.ArgumentError(self, "value must be an integer")
if items[k] < 0: raise argparse.ArgumentError(self, "value cannot be negative")
setattr(namespace, self.dest, items)
def positive_int(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value)
return ivalue
parser = argparse.ArgumentParser(description="Make a copy of a netCDF file with automatic chunk sizing")
parser.add_argument("-d","--dlevel", help="Set deflate level. Valid values 0-9 (default=5)", type=int, default=5, choices=range(0,10), metavar='{1-9}')
parser.add_argument("-m","--mindim", help="Minimum dimension of chunk. Valid values 1-dimsize", type=positive_int, default=1)
parser.add_argument("-s","--chunksize", help="Set chunksize - total size of one chunk in KiB (default=64)", type=int, default=64)
parser.add_argument("-b","--buffersize", help="Set size of copy buffer in MiB (default=500)", type=int, default=500)
parser.add_argument("-n","--noshuffle", help="Don't shuffle on deflation (default is to shuffle)", action='store_true')
parser.add_argument("-v","--verbose", help="Verbose output", action='store_true')
parser.add_argument("-c","--classic", help="use NETCDF4_CLASSIC output instead of NETCDF4 (default true)", action='store_false')
parser.add_argument("-f","--fletcher32", help="Activate Fletcher32 checksum", action='store_true')
parser.add_argument("-va","--vars", help="Specify variables to copy (default is to copy all)", action='append')
parser.add_argument("-q","--quantize", help="Truncate data in variable to a given decimal precision, e.g. -q speed=2 -q temp=0 causes variable speed to be truncated to a precision of 0.01 and temp to a precision of 1", action=DictAction)
parser.add_argument("-o","--overwrite", help="Write output file even if already it exists (default is to not overwrite)", action='store_true')
parser.add_argument("-i","--ignoreformat", help="Ignore warnings about netCDF4 formatted file: BE CAREFUL! (default false)", action='store_true')
parser.add_argument("origin", help="netCDF file to be compressed")
parser.add_argument("destination", help="netCDF output file")
return parser.parse_args(arglist)
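# Example invocation (hypothetical file names, assuming the script is installed
# as the nc2nc entry point):
#   nc2nc -d 5 -s 64 -q temp=2 input.nc output.nc
# copies input.nc to output.nc with deflate level 5, 64 KiB chunks, and the
# variable 'temp' quantized to two decimal places.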
def main(args):
zlib=False
if args.dlevel > 0: zlib=True
verbose = args.verbose
# copy the data from origin to destination
nc2nc(args.origin, args.destination, zlib=zlib, complevel=args.dlevel, shuffle=not args.noshuffle,
fletcher32=args.fletcher32, clobber=args.overwrite, lsd_dict=args.quantize,
verbose=verbose, vars=args.vars, classic=args.classic, chunksize=args.chunksize, buffersize=args.buffersize, ignoreformat=args.ignoreformat)
def main_parse_args(arglist):
"""
Call main with list of arguments. Callable from tests
"""
# Must return so that check command return value is passed back to calling routine
# otherwise py.test will fail
return main(parse_args(arglist))
def main_argv():
"""
Call main and pass command line arguments. This is required for setup.py entry_points
"""
main_parse_args(sys.argv[1:])
if __name__ == "__main__":
main_argv()
|
python
|
#!/usr/bin/python3
__author__ = 'yangdd'
'''
example 034
'''
def hello_world():
print('Hello World')
def three_hello():
for i in range(3):
hello_world()
if __name__ == '__main__':
three_hello()
|
python
|
from xml.etree import ElementTree
import csocha
from . import board, moves
class GameState:
def __init__(self, c: str, t: int, b: board.Board, undep: list):
self.color = c
self.opponent = "BLUE" if c == "RED" else "RED"
self.turn = t
self.board = b
self.undeployed = undep
def is_connected(self, fields: set) -> bool:
visited = [fields.pop()]
while len(visited) > 0:
neighbours = fields.intersection(csocha.neighbours(visited.pop(0)))
fields.difference_update(neighbours)
visited.extend(neighbours)
return len(fields) == 0
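    # Sketch (hypothetical axial coordinates, assuming csocha.neighbours(p)
    # yields the six adjacent hex fields): {(0, 0), (0, 1)} is connected,
    # while {(0, 0), (5, 5)} is not.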
    def can_be_disconnected(self, piece: tuple) -> bool:
nonempty = self.board.nonempty()
if len(nonempty) == 1:
return True
neighbours = csocha.neighbours(piece)
length = len(nonempty.intersection(neighbours))
if length < 2 or length > 5:
return True
return self.is_connected(set(nonempty).difference({piece}))
def get_possible_moves(self) -> set:
# Get possible set moves
possible_moves = self.get_possible_set_moves()
# Add possible drag moves
possible_moves.update(self.get_possible_drag_moves())
# If no move is possible, add skip move
if len(possible_moves) == 0:
possible_moves.add(moves.SkipMove())
# Return possible moves
return possible_moves
def get_possible_set_moves(self) -> set:
# First turn
if self.turn == 0:
# All empty fields are possible
dests = self.board.empty()
# Second turn
elif self.turn == 1:
# Get first set piece
            field = next(iter(self.board.color(self.opponent)))
# Get empty fields next to first piece
dests = self.board.empty().intersection(csocha.neighbours(field))
# All other turns
else:
# Get own pieces
dests = self.board.color(self.color)
# Get neighbours of own pieces
dests = {y for x in dests for y in csocha.neighbours(x)}
# Only empty fields
dests.intersection_update(self.board.empty())
# Get opponent pieces
opponent = self.board.color(self.opponent)
# Get neighbours of opponent pieces
opponent = {y for x in opponent for y in csocha.neighbours(x)}
# Only fields not next to opponent pieces
dests = dests.difference(opponent)
        # If the bee isn't set by the fifth turn, the player has to set it
if (self.turn > 5 and (self.color, "BEE") in self.undeployed):
types = {"BEE"}
else:
types = {x[1] for x in self.undeployed if x[0] == self.color}
# Return all combinations of pieces and destinations
return {
moves.SetMove((self.color, y), x)
for x in dests
for y in types
}
def get_possible_drag_moves(self) -> set:
# Drag moves are only possible when bee is set
if (self.color, "BEE") in self.undeployed:
return set()
possible_moves = set()
# Loop through all set pieces
for position in self.board.color(self.color):
            # When there is no piece underneath this one
if len(self.board.fields[position]) == 1:
if not self.can_be_disconnected(position):
continue
else:
# Piece is stacked therefore has to be a beetle
dests = self.get_beetle_move_dests(position)
# Call function to get piece type specific destinations
if self.board.fields[position][-1][1] == "BEETLE":
dests = self.get_beetle_move_dests(position)
elif self.board.fields[position][-1][1] == "BEE":
dests = self.get_bee_move_dests(position, position)
elif self.board.fields[position][-1][1] == "SPIDER":
dests = self.get_spider_move_dests(position)
elif self.board.fields[position][-1][1] == "ANT":
dests = self.get_ant_move_dests(position)
elif self.board.fields[position][-1][1] == "GRASSHOPPER":
dests = self.get_grasshopper_move_dests(position)
else:
continue
# Add all destinations to possible_moves
possible_moves.update(moves.DragMove(position, x) for x in dests)
# Return possible moves
return possible_moves
def get_beetle_move_dests(self, pos: tuple) -> set:
# Get neighbours of pos
all_neighbours = csocha.neighbours(pos)
# Only take fields with pieces
neighbours = set(self.board.nonempty().intersection(all_neighbours))
        # If we are on top of another piece, add it as well
if len(self.board.fields[pos]) > 1:
neighbours.add(pos)
# Get fields next to fields
dests = {y for x in neighbours for y in csocha.neighbours(x)}
# Only take fields in reach
dests.intersection_update(all_neighbours)
# Only take valid fields
dests.intersection_update(self.board.fields.keys())
# Return fields
return dests
def get_bee_move_dests(self, pos: tuple, start_pos: tuple) -> set:
# Get neighbours of pos
all_neighbours = csocha.neighbours(pos)
# Only take fields with pieces
neighbours = set(self.board.nonempty().intersection(all_neighbours))
# Remove own field
neighbours.discard(start_pos)
# Get fields next to fields
dests = set()
for neighbour in neighbours:
dests = dests.symmetric_difference(csocha.neighbours(neighbour))
# Get obstructed fields
obstructed = self.board.obstructed.copy()
# Only take obstructed fields in reach
obstructed.intersection_update(all_neighbours)
        # Get fields next to obstructed fields
obstructed = (y for x in obstructed for y in csocha.neighbours(x))
# Remove fields next to obstructed
dests = dests.difference(obstructed)
# Only take fields in reach
dests.intersection_update(all_neighbours)
# Only take empty fields
dests.intersection_update(self.board.empty())
# Return fields
return dests
def get_spider_move_dests(self, pos: tuple) -> set:
dests = {pos}
all_dests = dests.copy()
for _ in range(3):
dests = {
y
for x in dests
for y in self.get_bee_move_dests(x, pos)
}.difference(all_dests)
all_dests.update(dests)
return dests
def get_ant_move_dests(self, pos: tuple) -> set:
found = set()
todo = {pos}
while len(todo) > 0:
dest = todo.pop()
found.add(dest)
dests = self.get_bee_move_dests(dest, pos).difference(found)
todo.update(dests)
found.discard(pos)
return found
def get_grasshopper_move_dests(self, pos: tuple) -> set:
dests = set()
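        # the six neighbour directions on an axial-coordinate hex grid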
for direction in [(1, 0), (1, -1), (0, -1), (-1, 0), (-1, 1), (0, 1)]:
dest = (pos[0] + direction[0], pos[1] + direction[1])
if dest in self.board.empty():
continue
while dest in self.board.nonempty():
dest = (dest[0] + direction[0], dest[1] + direction[1])
dests.add(dest)
dests.intersection_update(self.board.empty())
return dests
def get_bee(self, color: str) -> tuple:
# Loop through all fields
for position, pieces in self.board.fields.items():
# If bee is on this field return it
if len(pieces) > 0 and pieces[0] == (color, "BEE"):
return position
        # Bee is not set yet, return None
return None
def game_ended(self):
        # Game can only end when it is red's turn
if self.color != "RED":
return False
# Get empty fields for use later
empty = self.board.empty()
# Get own bee
ownbee = self.get_bee(self.color)
# If own bee is set
if ownbee is not None:
# If own bee has been surrounded, game has ended
if len(set(csocha.neighbours(ownbee)).difference(empty)) == 6:
return True
# Get opponent bee
oppbee = self.get_bee(self.opponent)
# If opponent bee is set
if oppbee is not None:
# If opponent bee has been surrounded, game has ended
if len(set(csocha.neighbours(oppbee)).difference(empty)) == 6:
return True
        # If the turn limit is reached, the game has ended
return self.turn >= 60
def hash(self, depth: int) -> bytes:
if self.turn > 7 and self.turn < 60 - depth:
return csocha.hash(self.board.fields) + str(self.color).encode()
return csocha.hash(self.board.fields) + str(self.turn).encode()
def parse(xml: ElementTree.Element) -> GameState:
color = xml.get("currentPlayerColor")
turn = int(xml.get("turn"))
_board = board.parse(xml.find("board"))
undeployed = []
for piece in xml.findall("*/piece"):
undeployed.append((piece.get("owner"), piece.get("type")))
return GameState(color, turn, _board, undeployed)
|
python
|
from fastapi import FastAPI
TAREFAS = [
{"id": 1, "titulo": "Cristiano"},
{"id": 2, "titulo": "Araujo"}
]
app = FastAPI()
@app.get('/tarefas')
def listar():
return TAREFAS
|
python
|
import string
from model_mommy import mommy
from datetime import datetime
from django_rq import job
from django.contrib.auth.models import User
from django.utils import timezone
from dateutil.parser import parse as extract_date
from django.conf import settings
from survey.models import *
from survey.utils.decorators import static_var
from survey.tests.base_test import BaseTest
from survey.forms.answer import SurveyAllocationForm, AddMoreLoopForm
from .survey_base_test import SurveyBaseTest
class InterviewsTest(SurveyBaseTest):
def test_get_answer_with_question_not_yet_answered(self):
self._create_ussd_non_group_questions()
num_question = Question.objects.filter(answer_type=NumericalAnswer.choice_name()).last()
self.assertEquals(self.interview.get_answer(num_question), '')
def test_save_answers_with_interview_id(self):
self._create_ussd_non_group_questions(self.qset)
n_quest = Question.objects.get(answer_type=NumericalAnswer.choice_name())
t_quest = Question.objects.get(answer_type=TextAnswer.choice_name())
m_quest = Question.objects.get(answer_type=MultiChoiceAnswer.choice_name())
answers = [{n_quest.id: 1, t_quest.id: 'Hey Man', m_quest.id: 'Y'},
{n_quest.id: 5, t_quest.id: 'Hey Boy', m_quest.id: 'Y'},
{n_quest.id: 15, t_quest.id: 'Hey Girl!', m_quest.id: 'N'},
{n_quest.id: 15, t_quest.id: 'Hey Part!'}
]
question_map = {n_quest.id: n_quest, t_quest.id: t_quest, m_quest.id: m_quest}
Interview.save_answers(self.qset, self.survey, self.ea,
self.access_channel, question_map, answers, reference_interview=self.interview.id)
self.assertEquals(NumericalAnswer.objects.count(), 4)
self.assertEquals(TextAnswer.objects.count(), 4)
self.assertEquals(MultiChoiceAnswer.objects.count(), 3)
self.assertEquals(TextAnswer.objects.first().to_text().lower(), 'Hey Man'.lower())
self.assertEquals(MultiChoiceAnswer.objects.first().as_text.lower(), 'Y'.lower())
self.assertEquals(MultiChoiceAnswer.objects.first().as_value, str(QuestionOption.objects.get(text='Y').order))
self.assertEquals(Interview.objects.last().interview_reference, self.interview)
def test_interview_with_survey_parameters(self):
# self._create_ussd_group_questions()
pass
|
python
|
from scraper.web_scraper import WebScraper
from loguru import logger
uri = 'https://www.investing.com/technical/technical-summary'
class SummaryTableScraper(WebScraper):
def __init__(self, uri, class_name):
super(SummaryTableScraper, self).__init__()
self.goto(uri)
self.n_table_pairs = 12
self.table_class_name = class_name
self.technical_summary = self.__get_technical_summary()
def __get_technical_summary(self):
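        # the first six entries of the table text are header cells (a layout
        # assumption for the investing.com summary table), so they are skipped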
return self.find('.'+self.table_class_name, first=True).text.split('\n')[6:]
def get_pairs_info(self):
"""
        returns pair data keyed by currency pair; each value is a dict
        with Pair, Ratio, MovingAvg, Indicators and Summary entries
"""
summary_list = self.__get_technical_summary()
pairs_data = {}
tot_pairs = len(summary_list)//self.n_table_pairs
for i in range(0, len(summary_list), tot_pairs):
pairs_data[summary_list[i]] = {
'Pair' : summary_list[i],
'Ratio' : summary_list[i+1],
'MovingAvg' : summary_list[i+3:i+7],
'Indicators' : summary_list[i+8:i+12],
'Summary' : summary_list[i+13:i+17],
}
return pairs_data
def proc_pair_info(pair_info):
"""
return true if all are either `Strong Buy` OR `Strong Sell`
"""
if (len(set(pair_info['Summary'])) == 1) and (pair_info['Summary'][0][:6] == 'Strong'):
logger.debug(f"[TRUE ] scored .. {pair_info['Pair']} : {pair_info['Summary']}")
return True
logger.debug(f"[FALSE] scored .. {pair_info['Pair']} : {pair_info['Summary']}")
return False
class PairScores:
"""
Simple scores based on frequency [0,100]
"""
def __init__(self):
self.scores = {}
for pair in ['EUR/USD', 'GBP/USD', 'USD/JPY', 'USD/CHF', 'AUD/USD', 'EUR/GBP', 'USD/CAD', 'NZD/USD', 'EUR/JPY', 'EUR/CHF', 'GBP/JPY', 'GBP/CHF']:
self.scores[pair] = 0
def increment(self, cur_strong_pair):
self.scores[cur_strong_pair] = min(100, self.scores[cur_strong_pair]+1)
def decrement(self, cur_weak_pair):
self.scores[cur_weak_pair] = max(0, self.scores[cur_weak_pair]-1)
import os, time
from pprint import pprint
if __name__ == '__main__':
scraper = SummaryTableScraper(uri=uri, class_name='technicalSummaryTbl')
data = scraper.get_pairs_info()
pair_scores = PairScores()
while True:
in_ = input('\nenter pair : ')
scraper.goto(uri)
data = scraper.get_pairs_info()
for _, pair_info in data.items():
if pair_info['Pair'] == in_:
pprint(pair_info['Summary'])
'''
scraper = SummaryTableScraper(uri=uri, class_name='technicalSummaryTbl')
data = scraper.get_pairs_info()
pair_scores = PairScores()
while True:
scraper.goto(uri)
data = scraper.get_pairs_info()
for _, pair_info in data.items():
strong = proc_pair_info(pair_info)
if strong:
pair_scores.increment(pair_info['Pair'])
elif not strong:
pair_scores.decrement(pair_info['Pair'])
print(pair_scores.scores)
print("="*200)
time.sleep(3)
'''
|
python
|
import Bdecode as BD
import pprint
class Torrent:
def __init__(self, filename):
decoder = BD.Bdecode(filename)
self.torrentData = decoder.decode()
# self.meta_info = bencoding.Decoder(meta_info).decode()
def __str__(self):
        # pprint.pprint prints and returns None, which would make __str__ raise;
        # pformat returns the formatted string instead
        whatever = pprint.pformat(self.torrentData)
        # announce = self.torrentData[b'meta_info'][b'announce'].decode('utf-8')
        return whatever
if __name__ == "__main__":
torrent = Torrent("test.torrent")
print(torrent)
|
python
|
import collections.abc
from collections import OrderedDict
class ValidationError(Exception):
def __init__(self, errors):
self.errors = ValidationError.normalise(errors)
@staticmethod
def normalise(errors):
if isinstance(errors, dict):
new_errors = OrderedDict()
for k, v in errors.items():
if isinstance(v, (dict, list)):
v = ValidationError.normalise(v)
else:
v = [v]
new_errors[k] = v
elif isinstance(errors, list):
new_errors = []
for x in errors:
if isinstance(x, (dict, list)):
x = ValidationError.normalise(x)
new_errors.append(x)
else:
new_errors = [errors]
return new_errors
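    # e.g. normalise({'name': 'required'}) -> OrderedDict([('name', ['required'])]),
    # and a bare normalise('oops') becomes ['oops']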
@staticmethod
def _first(errors):
r = None
if isinstance(errors, list):
for x in errors:
r = ValidationError._first(x)
if r is not None:
break
elif isinstance(errors, dict):
for k, v in errors.items():
r = ValidationError._first(v)
if r is not None:
if r[0] is None:
path = (k,)
else:
path = (k,) + r[0]
r = (path, r[1])
break
else:
r = (None, errors)
return r
def first(self):
return ValidationError._first(self.errors)
@staticmethod
def _flatten(errors, path=None):
flattened_errors = []
if path is None:
path = tuple()
for field_name, field_errors in errors.items():
field_path = path + (field_name,)
            if isinstance(field_errors, collections.abc.Mapping):
flattened_field_errors = ValidationError._flatten(field_errors, path=field_path)
flattened_errors.extend(flattened_field_errors)
else:
for field_error in field_errors:
flattened_errors.append((field_path, field_error))
return flattened_errors
def flatten(self):
return ValidationError._flatten(self.errors)
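    # e.g. ValidationError({'user': {'name': 'required'}}).flatten()
    # -> [(('user', 'name'), 'required')]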
def __str__(self):
return str(self.errors)
class SkipField(Exception):
pass
|
python
|
from school.models.class_model import Class
from account.models.instructor_model import InstructorProfile
from country.models.country_model import Country
from country.models.city_model import City
from school.models.school_model import School
from django.urls.base import reverse
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
User = get_user_model()
class StudentListTests(APITestCase):
url_list = reverse("school:student_list")
url_add = reverse("school:add_student_list_item")
url_list_by_instructor = reverse("school:student_list_by_class_instructor")
url_login = reverse("token_obtain_pair")
def setUp(self) -> None:
self.country = Country.objects.create(name = "Türkiye", code = "Tur")
self.city = City.objects.create(country = self.country, name = "Konya", code = "42")
self.school = School.objects.create(city = self.city, name = "Example School", address = "Example Address", website = "Example website")
self.password = "testpass123"
self.normal_user = User.objects.create_user(username = "normaldoe", password = self.password, user_type = 1)
self.instructor_user1 = User.objects.create_user(
username="johndoe",
password=self.password,
user_type=4,
email = "[email protected]",
identity_number = "12345678910",
)
instructor_profile = InstructorProfile.objects.get(user = self.instructor_user1)
instructor_profile.school = self.school
instructor_profile.save()
self.instructor_user2 = User.objects.create_user(
username="johndoe2",
password=self.password,
user_type=4,
email = "[email protected]",
identity_number = "12345678911",
)
instructor_profile2 = InstructorProfile.objects.get(user = self.instructor_user2)
instructor_profile2.school = self.school
instructor_profile2.save()
self.school_class = Class.objects.create(school = self.school, instructor = self.instructor_user1.user_instructor, name = "Class A", grade = 1)
self.child = User.objects.create_user(
username = "johndoechild",
password = self.password,
email = "[email protected]",
identity_number = "12345678912",
user_type = 2
)
self.url_destroy = reverse("school:student_list_item_destroy", kwargs={"class_id": self.school_class.id, "child_id": self.child.id})
def login_with_token(self, login_data):
"""
A method for using login process.
"""
response = self.client.post(self.url_login, login_data)
self.assertEqual(200, response.status_code)
token = response.data["access"]
self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
def test_student_list(self):
"""
Tests that school list page returns a status code of 200.
"""
response = self.client.get(self.url_list)
self.assertEqual(200, response.status_code)
def test_add_item_is_authenticated(self):
"""
Tests whether the user is authenticated.
"""
response = self.client.get(self.url_add)
self.assertEqual(401, response.status_code)
def test_add_item_is_instructor(self):
"""
Tests whether the user is instructor.
"""
data = {
"username": self.normal_user.username,
"password": self.password
}
self.login_with_token(data)
response = self.client.get(self.url_add)
self.assertEqual(403, response.status_code)
def test_add_list_item_is_own_class(self):
"""
Child-class relation record add page test.
"""
login_data = {
"username": self.instructor_user2.username,
"password": self.password
}
self.login_with_token(login_data)
data = {"school_class": self.school_class.id, "child": self.child.id}
response = self.client.post(self.url_add, data)
self.assertEqual(403, response.status_code)
def test_add_list_item(self):
"""
Child-class relation record add page test.
"""
login_data = {
"username": self.instructor_user1.username,
"password": self.password
}
self.login_with_token(login_data)
data = {"school_class": self.school_class.id, "child": self.child.id}
response = self.client.post(self.url_add, data)
self.assertEqual(201, response.status_code)
def test_destroy_student_list_item_is_authenticated(self):
"""
Tests whether the user is authenticated, and if not, the user cannot access the "student list item destroy" page.
"""
response = self.client.get(self.url_destroy)
assert 401 == response.status_code
def test_destroy_student_list_item_is_instructor(self):
"""
Tests whether the user is instructor.
"""
login_data = {
"username": self.normal_user.username,
"password": self.password
}
self.login_with_token(login_data)
response = self.client.get(self.url_add)
assert 403 == response.status_code
def test_student_list_item_destroy_is_own_class(self):
"""
This test has to return a 404 result. This is because we used get_object_or_404 in Destroy View. Test user hasn't got any student so this test will return 404.
"""
login_data = {
"username": self.instructor_user2.username,
"password": self.password
}
self.login_with_token(login_data)
response = self.client.delete(self.url_destroy)
self.assertEqual(404, response.status_code)
def test_student_list_item_destroy(self):
"""
Tests whether the user can delete class-child relation properly.
"""
login_data = {
"username": self.instructor_user1.username,
"password": self.password
}
self.login_with_token(login_data)
self.test_add_list_item()
response = self.client.delete(self.url_destroy)
self.assertEqual(204, response.status_code)
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Persistent identifier minters."""
from __future__ import absolute_import, print_function
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
def recid_minter_v2(record_uuid, data):
"""Mint record identifiers with RecordIDProviderV2.
This minter is recommended to be used when creating records to get
PersistentIdentifier with ``object_type='rec'`` and the new random
alphanumeric `pid_value`.
Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
``data``. The minted ``pid_value`` will be stored in that field.
:param record_uuid: The object UUID of the record.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
provider = RecordIdProviderV2.create(
object_type='rec', object_uuid=record_uuid)
data[pid_field] = provider.pid.pid_value
return provider.pid
def recid_minter(record_uuid, data):
"""Mint record identifiers.
This is a minter specific for records.
With the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
the PID instance with `rec` as predefined `object_type`.
Procedure followed: (we will use `control_number` as value of
`PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
    #. If a `control_number` field is already there, an `AssertionError`
exception is raised.
#. The provider is initialized with the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
It's called with default value 'rec' for `object_type` and `record_uuid`
variable for `object_uuid`.
    #. The new `pid_value` is stored inside `data` as the `control_number` field.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
provider = RecordIdProvider.create(
object_type='rec', object_uuid=record_uuid)
data[pid_field] = provider.pid.pid_value
return provider.pid
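# Minimal usage sketch (record_uuid and data are hypothetical):
#
#     data = {}
#     pid = recid_minter(record_uuid, data)
#     # data now holds the minted pid_value under the PIDSTORE_RECID_FIELD key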
|
python
|
from threading import local
from django.test import TestCase
from django.test import override_settings
from cid.locals import generate_new_cid
from cid.locals import get_cid
from cid.locals import set_cid
_thread_locals = local()
class TestCidStorage(TestCase):
def setUp(self):
self.clear_cid()
self.cid = 'test-cid'
def tearDown(self):
self.clear_cid()
def clear_cid(self):
try:
delattr(_thread_locals, 'CID')
except AttributeError:
pass
def test_get_empty_cid(self):
self.assertIsNone(get_cid())
def test_set_cid(self):
self.assertIsNone(get_cid())
set_cid(self.cid)
self.assertEqual(self.cid, get_cid())
@override_settings(CID_GENERATE=True, CID_GENERATOR=lambda: 'constant_correlation')
def test_custom_generator(self):
assert generate_new_cid() == 'constant_correlation'
|
python
|
'''Tools for interaction with IDF build system'''
def build_name(name):
name_parts = name.split('/')
return '__'.join(name_parts)
|
python
|
from torch.utils.data import Dataset, DataLoader
from albumentations import (ShiftScaleRotate, Compose, CoarseDropout, RandomCrop, HorizontalFlip, OneOf, ElasticTransform,
OpticalDistortion, RandomGamma, Resize, GaussNoise, VerticalFlip, RandomBrightnessContrast)
import cv2
import os
import torch
import pickle
import matplotlib.pyplot as plt
import numpy as np
from skimage import io
import random
from utils import remove_small_areas, keep_large_area, fit_Ellipse, crop_mask_expand, roi_extend
class Dataset_train(Dataset):
def __init__(self, data_root='data', size=(512, 512), fold=0):
self.root = data_root
file = open(os.path.join(data_root, 'train_val_split_200803.pkl'), 'rb')
pkl_data = pickle.load(file)
if fold == -1:
self.train_name_list = pkl_data[0][0]
            self.train_name_list.extend(pkl_data[0][1])  # extend, not append: pkl_data[0][1] is itself a list of names
else:
self.train_name_list = pkl_data[fold][0]
self.len = len(self.train_name_list)
self.transforms = Compose([Resize(size[0], size[0]),
ShiftScaleRotate(shift_limit=0.1, scale_limit=0.2, rotate_limit=30, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0),
VerticalFlip(p=0.5),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30,
border_mode=cv2.BORDER_CONSTANT, value=0),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1,
border_mode=cv2.BORDER_CONSTANT, value=0)], p=0.5),
RandomGamma(gamma_limit=(80, 120), p=0.5),
GaussNoise(var_limit=(10, 100), mean=0, p=0.5),
RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2, p=0.5),
CoarseDropout(max_holes=2, max_height=256, max_width=256, min_holes=1, min_height=5,
min_width=5, fill_value=0, p=0.5)
])
self.pseudo_mask_transformation = Compose([ShiftScaleRotate(shift_limit=0.1, scale_limit=0.1, rotate_limit=180, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0, interpolation=cv2.INTER_NEAREST),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST)], p=0.5)])
def __getitem__(self, idx):
name = self.train_name_list[idx]
if random.randint(0, 1) == 1:
img = io.imread(os.path.join(self.root, 'img_match_challenge_val', name))
else:
img = io.imread(os.path.join(self.root, 'img', name))
        # str.rstrip('.jpg') strips any trailing '.', 'j', 'p', 'g' characters and can
        # corrupt file stems; os.path.splitext removes only the extension
        stem = os.path.splitext(name)[0]
        Disc_Cup_mask = cv2.imread(os.path.join(self.root, 'mask', stem + '.png'), 0)  # [0, 100, 200]
        cate = int(stem.split('_')[-1])
x1_new, x2_new, y1_new, y2_new = crop_mask_expand(Disc_Cup_mask, expand_Percentage=0.2)
Disc_Cup_mask_ROI = Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new]
Disc_Cup_mask_ROI = self.pseudo_mask_transformation(image=Disc_Cup_mask_ROI)['image']
Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new] = Disc_Cup_mask_ROI
augmented = self.transforms(image=img, mask=Disc_Cup_mask)
img, Disc_Cup_mask = augmented['image'], augmented['mask']
img = torch.from_numpy(img).float().permute(2, 0, 1) / 255
Disc_mask = (Disc_Cup_mask > 0).astype(np.uint8)
Cup_mask = (Disc_Cup_mask == 200).astype(np.uint8)
Disc_mask = torch.from_numpy(Disc_mask).unsqueeze(0).float()
Cup_mask = torch.from_numpy(Cup_mask).unsqueeze(0).float()
img = torch.cat((img, Disc_mask, Cup_mask), dim=0)
cate = torch.tensor(cate)
return img, cate
def __len__(self):
return self.len
class Dataset_val(Dataset):
def __init__(self, data_root='data', size=(512, 512), fold=0):
self.root = data_root
file = open(os.path.join(data_root, 'train_val_split_200803.pkl'), 'rb')
pkl_data = pickle.load(file)
self.val_name_list = pkl_data[fold][1]
self.len = len(self.val_name_list)
self.transforms = Compose([Resize(size[0], size[1])])
self.pseudo_mask_transformation = Compose(
[ShiftScaleRotate(shift_limit=0.1, scale_limit=0.1, rotate_limit=180, p=0.7,
border_mode=cv2.BORDER_CONSTANT, value=0, interpolation=cv2.INTER_NEAREST),
OneOf([ElasticTransform(p=1, alpha=50, sigma=30, alpha_affine=30, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST),
OpticalDistortion(p=1, distort_limit=0.5, shift_limit=0.1, border_mode=cv2.BORDER_CONSTANT,
value=0, interpolation=cv2.INTER_NEAREST)], p=0.5)])
def __getitem__(self, idx):
name = self.val_name_list[idx]
if random.randint(0, 1) == 1:
img = io.imread(os.path.join(self.root, 'img_match_challenge_val', name))
else:
img = io.imread(os.path.join(self.root, 'img', name))
        stem = os.path.splitext(name)[0]  # avoids the rstrip/strip('.jpg') pitfall
        Disc_Cup_mask = cv2.imread(os.path.join(self.root, 'mask', stem + '.png'), 0)  # [0, 100, 200]
        cate = int(stem.split('_')[-1])
x1_new, x2_new, y1_new, y2_new = crop_mask_expand(Disc_Cup_mask, expand_Percentage=0.2)
Disc_Cup_mask_ROI = Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new]
Disc_Cup_mask_ROI = self.pseudo_mask_transformation(image=Disc_Cup_mask_ROI)['image']
Disc_Cup_mask[x1_new: x2_new, y1_new: y2_new] = Disc_Cup_mask_ROI
augmented = self.transforms(image=img, mask=Disc_Cup_mask)
img, Disc_Cup_mask = augmented['image'], augmented['mask']
img = torch.from_numpy(img).float().permute(2, 0, 1) / 255
Disc_mask = (Disc_Cup_mask > 0).astype(np.uint8)
Cup_mask = (Disc_Cup_mask == 200).astype(np.uint8)
Disc_mask = torch.from_numpy(Disc_mask).unsqueeze(0).float()
Cup_mask = torch.from_numpy(Cup_mask).unsqueeze(0).float()
img = torch.cat((img, Disc_mask, Cup_mask), dim=0)
cate = torch.tensor(cate)
return img, cate
def __len__(self):
return self.len
if __name__ == '__main__':
train_data = Dataset_val(data_root='data', size=(512, 512), fold=0)
train_dataloader = DataLoader(dataset=train_data, batch_size=1, shuffle=True)
for i, (inputs, cate) in enumerate(train_dataloader):
print(cate, cate.size())
img = inputs[:, :3, :, :].squeeze(0).permute(1, 2, 0).numpy()
Disc = inputs[:, 3, :, :].squeeze(0).numpy()
Cup = inputs[:, 4, :, :].squeeze(0).numpy()
plt.subplot(131)
plt.imshow(img)
plt.subplot(132)
plt.imshow(Disc)
plt.subplot(133)
plt.imshow(Cup)
plt.show()
|
python
|
#!/usr/bin/env python
from subprocess import check_call
import sys
import os
import traceback
def safe_remove(f):
try:
if os.path.exists(f):
os.remove(f)
    except Exception:
        traceback.print_exc()
def tsprint(msg):
sys.stderr.write(msg)
sys.stderr.write("\n")
if __name__ == "__main__":
tsprint("WARNING: The s3cp.py script is deprecated. Use 's3mi cp' or 's3mi cat' instead.")
if sys.argv[2] == "-":
check_call(["s3mi", "cat", sys.argv[1]])
else:
safe_remove(sys.argv[2])
with open(sys.argv[2], "ab") as dest:
check_call(["s3mi", "cat", sys.argv[1]], stdout=dest)
|
python
|
from .__init__ import *
def gen_func(maxRadius=100, format='string'):
r = random.randint(1, maxRadius)
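    # hemisphere volume: V = (2/3) * pi * r**3, half of a sphere's (4/3) * pi * r**3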
ans = round((2 * math.pi / 3) * r**3, 3)
if format == 'string':
problem = f"Volume of hemisphere with radius {r} m = "
solution = f"{ans} m^3"
return problem, solution
elif format == 'latex':
return "Latex unavailable"
else:
return r, ans
volume_sphere = Generator("Volume of Hemisphere", 117, gen_func,
["maxRadius=100"])
|
python
|
# -*- coding:utf8 -*-
'''
export data
'''
import logging
import xlwt
from LMDI import Lmdi
from SinglePeriodAAM import Spaam
from MultiPeriodAAM import Mpaam
class WriteLmdiData(object):
'''
write data using with surrounding
'''
def __init__(self, xls_file_name, *lmdis):
'''
construction
Args:
xls_file_name: to save excel file name
lmdis: the total lmdis to write
'''
self._xls_file_name = xls_file_name
self._lmdis = lmdis
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write the excel
'''
for lmdi in self._lmdis:
if lmdi.name == '':
raise Exception(Lmdi.__name__+' should initialize by name. ')
sheet = self._workbook.add_sheet(lmdi.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, lmdi.province_names)
self._write_column(sheet, 1, lmdi.pro_t)
self._write_column(sheet, 2, lmdi.pro_t1)
self._write_column(sheet, 3, lmdi.energy_t)
self._write_column(sheet, 4, lmdi.energy_t1)
self._write_column(sheet, 5, lmdi.co2_t)
self._write_column(sheet, 6, lmdi.co2_t1)
self._write_column(sheet, 7, lmdi.lambda_t_t)
self._write_column(sheet, 8, lmdi.lambda_t_t1)
self._write_column(sheet, 9, lmdi.lambda_t1_t)
self._write_column(sheet, 10, lmdi.lambda_t1_t1)
self._write_column(sheet, 11, lmdi.theta_t_t)
self._write_column(sheet, 12, lmdi.theta_t_t1)
self._write_column(sheet, 13, lmdi.theta_t1_t)
self._write_column(sheet, 14, lmdi.theta_t1_t1)
self._write_column(sheet, 15, lmdi.emx())
self._write_column(sheet, 16, lmdi.pei())
self._write_column(sheet, 17, lmdi.pis())
self._write_column(sheet, 18, lmdi.isg())
self._write_column(sheet, 19, lmdi.eue())
self._write_column(sheet, 20, lmdi.est())
self._write_column(sheet, 21, lmdi.yoe())
self._write_column(sheet, 22, lmdi.yct())
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'T 期产出')
sheet.write(0, 2, label=u'T+1 期产出')
sheet.write(0, 3, label=u'T 期能源消耗')
sheet.write(0, 4, label=u'T+1 期能源消耗')
sheet.write(0, 5, label=u'T 期Co2排放')
sheet.write(0, 6, label=u'T+1 期Co2排放')
sheet.write(0, 7, label=u'lambda_t_t')
sheet.write(0, 8, label=u'lambda_t_t1')
sheet.write(0, 9, label=u'lambda_t1_t')
sheet.write(0, 10, label=u'lambda_t1_t1')
sheet.write(0, 11, label=u'theta_t_t')
sheet.write(0, 12, label=u'theta_t_t1')
sheet.write(0, 13, label=u'theta_t1_t')
sheet.write(0, 14, label=u'theta_t1_t1')
sheet.write(0, 15, label=u'emx')
sheet.write(0, 16, label=u'pei')
sheet.write(0, 17, label=u'pis')
sheet.write(0, 18, label=u'isg')
sheet.write(0, 19, label=u'eue')
sheet.write(0, 20, label=u'est')
sheet.write(0, 21, label=u'yoe')
sheet.write(0, 22, label=u'yct')
def _write_column(self, sheet, column, values):
'''
Args:
sheet: the sheet
column: the column to WriteData
values: the values to write
'''
try:
row = 1
for value in values:
sheet.write(row, column, label=value)
row += 1
except TypeError:
logging.error('the type error in '+str(column)+ ' column')
raise
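# Hypothetical usage sketch (assumes Lmdi objects were constructed with a name):
#
#     with WriteLmdiData('lmdi_result.xls', lmdi_2010, lmdi_2011) as writer:
#         writer.write()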
def WriteLmdi(object):
'''
    Write the LMDI results, formatted to match the Word document.
'''
def __init__(self, xls_file_name, *lmdis):
self._xls_file_name = xls_file_name
self._lmdis = lmdis
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
pass
def _write_base_previous(self, sheet):
columns = ['periods', '']
    def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
raise Exception
else:
pass
class WriteSpaamData(object):
'''
write the spaam data
'''
def __init__(self, xls_file_name, *spaams):
'''
construction
Args:
xls_file_name: to save excel file name
spaams: the total spaam to write
'''
self._xls_file_name = xls_file_name
self._spaams = spaams
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write value
'''
for spaam in self._spaams:
if spaam.name == '':
raise Exception(Spaam.__name__ + ' should be initialized by name')
sheet = self._workbook.add_sheet(spaam.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, spaam.province_names)
self._write_column(sheet, 1, spaam.emx_attributions)
self._write_column(sheet, 2, spaam.pei_attributions)
self._write_column(sheet, 3, spaam.pis_attributions)
self._write_column(sheet, 4, spaam.isg_attributions)
self._write_column(sheet, 5, spaam.eue_attributions)
self._write_column(sheet, 6, spaam.est_attributions)
self._write_column(sheet, 7, spaam.yoe_attributions)
self._write_column(sheet, 8, spaam.yct_attributions)
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'emx')
sheet.write(0, 2, label=u'pei')
sheet.write(0, 3, label=u'pis')
sheet.write(0, 4, label=u'isg')
sheet.write(0, 5, label=u'eue')
sheet.write(0, 6, label=u'est')
sheet.write(0, 7, label=u'yoe')
sheet.write(0, 8, label=u'yct')
def _write_column(self, sheet, column, values):
'''
        write values to a particular column
'''
row = 1
for value in values:
if column != 0:
value *= 100.0
sheet.write(row, column, label=value)
row += 1
    def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
class WriteMpaamData(object):
'''
write the mpaam data
'''
def __init__(self, xls_file_name, *mpaams):
'''
construction
Args:
xls_file_name: to save excel file name
mpaams: the total mpaam to write
'''
self._xls_file_name = xls_file_name
self._mpaams = mpaams
def __enter__(self):
self._workbook = xlwt.Workbook(encoding='utf8')
return self
def write(self):
'''
write value
'''
for mpaam in self._mpaams:
if mpaam.name == '':
raise Exception(Mpaam.__name__ + ' should be initialized by name')
sheet = self._workbook.add_sheet(mpaam.name)
self._write_columns_names(sheet)
self._write_column(sheet, 0, mpaam.province_names)
self._write_column(sheet, 1, mpaam.emx())
self._write_column(sheet, 2, mpaam.pei())
self._write_column(sheet, 3, mpaam.pis())
self._write_column(sheet, 4, mpaam.isg())
self._write_column(sheet, 5, mpaam.eue())
self._write_column(sheet, 6, mpaam.est())
self._write_column(sheet, 7, mpaam.yoe())
self._write_column(sheet, 8, mpaam.yct())
def _write_columns_names(self, sheet):
sheet.write(0, 0, label=u'省份')
sheet.write(0, 1, label=u'emx')
sheet.write(0, 2, label=u'pei')
sheet.write(0, 3, label=u'pis')
sheet.write(0, 4, label=u'isg')
sheet.write(0, 5, label=u'eue')
sheet.write(0, 6, label=u'est')
sheet.write(0, 7, label=u'yoe')
sheet.write(0, 8, label=u'yct')
def _write_column(self, sheet, column, values):
'''
        write the values to a particular column
'''
row = 1
for value in values:
if column != 0:
value *= 100
sheet.write(row, column, label=value)
row += 1
    def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._workbook.save(self._xls_file_name)
elif exc_type is Exception:
logging.error(exc_val)
else:
pass
|
python
|
from pathlib import PurePath
def part1(l: list[int]) -> int:
l = l.copy()
i = 0
while i < len(l):
match l[i]:
case 1:
l[l[i + 3]] = l[l[i + 1]] + l[l[i + 2]]
i += 3
case 2:
l[l[i + 3]] = l[l[i + 1]] * l[l[i + 2]]
i += 3
case 99:
break
i += 1
return l[0]
def part2(l: list[int]) -> int:
ans = 19690720
for noun in range(0, 100):
for verb in range(0, 100):
curr = l.copy()
curr[1] = noun
curr[2] = verb
try:
result = part1(curr)
except IndexError:
continue
else:
# ++noun -> result + 345600
# ++verb -> result + 1
if ans - result > 99:
break
if result == ans:
return 100 * noun + verb
return -1
def test() -> None:
assert part1([1,9,10,3,2,3,11,0,99,30,40,50]) == 3500
if __name__ == '__main__':
with open(f'./data/input-{PurePath(__file__).stem}.txt', 'r') as f:
l = list(map(int, f.read().split(',')))
print("Part 1:", part1(l))
print("Part 2:", part2(l))
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-06 21:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketgrab', '0008_remove_data_v'),
]
operations = [
migrations.AddField(
model_name='data',
name='volume',
field=models.DecimalField(decimal_places=0, default=1, max_digits=15),
preserve_default=False,
),
]
|
python
|
from django.contrib import admin
from . import models
def push_to_influxdb(modeladmin, request, queryset):
for row in queryset:
row.push_to_influxdb()
@admin.register(models.Instance)
class InstanceAdmin(admin.ModelAdmin):
list_display = [
'name',
'url',
'users',
'statuses',
'up',
'open_registrations',
'https_score',
'last_fetched',
]
search_fields = ['name']
actions = [push_to_influxdb]
|
python
|
"""https://open.kattis.com/problems/provincesandgold"""
from collections import OrderedDict
vic = OrderedDict([("Province", 8), ("Duchy", 5), ("Estate", 2)])
tres = OrderedDict([("Gold", 6), ("Silver", 3), ("Copper", 0)])
inp = list(map(int, input().split()))
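# the three inputs are the counts of Gold, Silver and Copper in hand,
# worth 3, 2 and 1 coins each (the dicts above hold the cards' purchase costs)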
money = inp[0] * 3 + inp[1] * 2 + inp[2]
options = []
for coin, cost in tres.items():
if money >= cost:
options.append(coin)
break
for prov, cost in vic.items():
if money >= cost:
options.insert(0, prov)
break
if len(options) == 2:
print(options[0], "or", options[1])
else:
print(options[0])
|
python
|
#This script is hijacked from targetscan_parsecontextscores.py. It asks which miRNAs
#are enriched for having sites in a particular sequence set. Actually, more precisely,
#it just gives the density of sites for each miRNA. Number of sites for a miRNA / total sequence
#search space.
import os
import gffutils
import argparse
from numpy import mean as mean
from numpy import median as median
def parsecontextscores(csfile, gff, featurename):
#Make dictionary of this form:
# {UTRname : [[UTRlength], [names of all miRNAs that have sites in that UTR]]}
#csfile = output of targetscan_60_context_scores.pl
#gff = gff file of regions of interest
#featurename = feature category in gff file (3rd field)
lengthdict = {}
CSdict = {}
#First need to get lengths
gff_fn = gff
db_fn = os.path.basename(gff_fn) + '.db'
    if not os.path.isfile(db_fn):  # if the database doesn't exist, create it
gffutils.create_db(gff_fn, db_fn)
db = gffutils.FeatureDB(db_fn)
features = db.features_of_type(featurename)
for feature in features:
featureid = feature.id
featurelength = feature.stop - feature.start
lengthdict[featureid] = featurelength
os.remove(db_fn)
#Now get miRNA names
csfilehandle = open(csfile, 'r')
for line in csfilehandle:
line = line.strip().split('\t')
if line[0] != 'Gene ID': #skip header line
featureid = line[0].split(';')[0] #Remove Parent=...
species = line[1]
miRNAname = line[2]
if species == '10090': #this is mouse; for other species, change this number
if featureid not in CSdict:
CSdict[featureid] = [[lengthdict[featureid]], [miRNAname]]
elif featureid in CSdict:
CSdict[featureid][1].append(miRNAname)
csfilehandle.close()
return CSdict
def parseCSdict(CSdict):
#CSdict = {UTRname : [[UTRlength], [names of all miRNAs that have sites in that UTR]]}
miRNAsites = {} #{miRNA : number of sites}
miRNAdensities = {} #{miRNA : density of sites}
totalsequencelength = 0
for UTR in CSdict:
totalsequencelength += int(CSdict[UTR][0][0])
print 'The total sequence search space was {0} nt'.format(totalsequencelength)
#Count miRNA occurences
for UTR in CSdict:
miRNAs = CSdict[UTR][1]
for miRNA in miRNAs:
if miRNA not in miRNAsites:
miRNAsites[miRNA] = 1
elif miRNA in miRNAsites:
miRNAsites[miRNA] +=1
for miRNA in miRNAsites:
miRNAdensities[miRNA] = miRNAsites[miRNA] / float(totalsequencelength)
return miRNAdensities
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--csfile', type = str, help = 'Targetscan_60_context_scores.pl output.')
parser.add_argument('--gff', type = str, help = 'Gff of regions that targetscan looked through.')
parser.add_argument('--featurename', type = str, help = 'Feature category in gff file (3rd field of gff)')
parser.add_argument('--outfile', type = str, help = 'Output file.')
args = parser.parse_args()
CSdict = parsecontextscores(args.csfile, args.gff, args.featurename)
miRNAdensities = parseCSdict(CSdict)
outfh = open(args.outfile, 'w')
outfh.write('miRNA' + '\t' + 'density' + '\n')
for entry in miRNAdensities:
outfh.write(entry + '\t' + str(miRNAdensities[entry]) + '\n')
outfh.close()
|
python
|
from cansfr import *
class can11xx (object):
'''
can11xx hierarchy
----------------- canm0
{sfr /
updrpl - ams ~ ~
{i2c / |
sfr1108 cani2c - - - tsti2c ~ isp
/ / /
sfr11xx - sfr111x - sfr1110 } can11xx nvm - atm
\ \ \
sfr1112 - - - - - - tstcsp ~ csp
/ {canm0
cspnvm
'''
def __init__ (me):
me.sfr = sfr11xx() # initial
if me.is_master_rdy():
revid = me.get_revid ()
# print 'master is ready', revid
if revid > 0: # found
if sfr1108().check (revid): me.sfr = sfr1108(revid)
elif sfr1110().check (revid): me.sfr = sfr1110(revid)
elif sfr1112().check (revid): me.sfr = sfr1112(revid)
elif sfr1124().check (revid): me.sfr = sfr1124(revid)
else:
                    print('un-recognized REVID: %02X' % revid)
me.sfr = sfr11xx()
# else:
# print 'master is not ready'
def is_master_rdy (me): raise NotImplementedError()
def sfrwx (me, adr, wdat): raise NotImplementedError() # non-INC write
def sfrwi (me, adr, wdat): raise NotImplementedError() # INC write
def sfrrx (me, adr, cnt): raise NotImplementedError() # non-INC read
def sfrri (me, adr, cnt): raise NotImplementedError() # INC read
def get_revid (me):
sav = me.sfrrx (me.sfr.DEC, 1) # try slave
if len(sav): # data returned
me.sfrwx (me.sfr.DEC, [me.sfr.REVID])
revid = \
me.sfrrx (me.sfr.REVID, 1)[0] & 0x7f
me.sfrwx (me.sfr.DEC, [sav[0]])
return revid
return 0
class cani2c (can11xx):
def __init__ (me, busmst, deva, rpt=0):
me.deva = deva
me.busmst = busmst # SFR master (I2C)
can11xx.__init__ (me) # SFR target
if me.sfr.revid:
if rpt:
                print('I2C master finds %s, 0x%02x' % (me.sfr.name, me.deva))
if me.sfr.inc == 1: # CAN1108/11
me.sfrwx (me.sfr.I2CCTL, [me.sfrrx (me.sfr.I2CCTL,1)[0] | 0x01]) # we'll work in NINC mode
def is_master_rdy (me):
''' Is this master ready for issuing things?
'''
return TRUE if me.busmst else FALSE
def sfrwx (me, adr, wdat):
return me.busmst.write (me.deva, adr, wdat)
def sfrrx (me, adr, cnt):
return me.busmst.read (me.deva, adr, cnt, FALSE)
def sfrri (me, adr, cnt):
sav = me.sfrrx (me.sfr.I2CCTL, 1)[0]
setinc = sav & 0xfe if me.sfr.inc else sav | 0x01
me.sfrwx (me.sfr.I2CCTL, [setinc]) # INC mode
rdat = me.busmst.read (me.deva, adr, cnt)
me.sfrwx (me.sfr.I2CCTL, [sav])
return rdat
def sfrwi (me, adr, wdat):
sav = me.sfrrx (me.sfr.I2CCTL, 1)[0]
setinc = sav & 0xfe if me.sfr.inc else sav | 0x01
me.sfrwx (me.sfr.I2CCTL, [setinc]) # INC mode
ret = me.busmst.write (me.deva, adr, wdat)
me.sfrwx (me.sfr.I2CCTL, [sav])
return ret
|
python
|
from __future__ import print_function
import argparse
import pickle
import os
import time
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
from torchvision import datasets, transforms
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)
def forward(self, x):
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = F.dropout(x, training=self.training)
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def get_stat(data):
# TODO: Add num backpropped
stat = {}
stat["average"] = np.average(data)
stat["p25"] = np.percentile(data, 25)
stat["p50"] = np.percentile(data, 50)
stat["p75"] = np.percentile(data, 75)
stat["p90"] = np.percentile(data, 90)
stat["max"] = max(data)
stat["min"] = min(data)
return stat
def update_batch_stats(batch_stats, num_backpropped, pool_losses=None, chosen_losses=None, gradients=None):
'''
batch_stats = [{'chosen_losses': {stat},
'pool_losses': {stat}}]
'''
snapshot = {"chosen_losses": get_stat(chosen_losses),
"pool_losses": get_stat(pool_losses)}
batch_stats.append(snapshot)
def train(args,
model,
device,
trainloader,
optimizer,
epoch,
total_num_images_backpropped,
images_hist,
batch_stats=None):
print('\nEpoch: %d' % epoch)
model.train()
train_loss = 0
correct = 0
total = 0
losses_pool = []
data_pool = []
targets_pool = []
ids_pool = []
num_backprop = 0
loss_reduction = None
for batch_idx, (data, targets, image_id) in enumerate(trainloader):
data, targets = data.to(device), targets.to(device)
if args.selective_backprop:
output = model(data)
loss = F.nll_loss(output, targets)
losses_pool.append(loss.item())
data_pool.append(data)
targets_pool.append(targets)
ids_pool.append(image_id.item())
if len(losses_pool) == args.pool_size:
# Choose frames from pool to backprop
indices = np.array(losses_pool).argsort()[-args.top_k:]
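                # argsort is ascending, so the last top_k indices are the
                # highest-loss examples in the pool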
chosen_data = [data_pool[i] for i in indices]
chosen_targets = [targets_pool[i] for i in indices]
chosen_ids = [ids_pool[i] for i in indices]
chosen_losses = [losses_pool[i] for i in indices]
data_batch = torch.stack(chosen_data, dim=1)[0]
targets_batch = torch.cat(chosen_targets)
                    output_batch = model(data_batch)  # recompute the forward pass so backward() has a fresh graph
for chosen_id in chosen_ids:
images_hist[chosen_id] += 1
# Get stats for batches
if batch_stats is not None:
update_batch_stats(batch_stats,
total_num_images_backpropped,
pool_losses = losses_pool,
chosen_losses = chosen_losses)
# Note: This will only work for batch size of 1
loss_reduction = F.nll_loss(output_batch, targets_batch)
optimizer.zero_grad()
loss_reduction.backward()
optimizer.step()
train_loss += loss_reduction.item()
num_backprop += args.top_k
losses_pool = []
data_pool = []
targets_pool = []
ids_pool = []
output = output_batch
targets = targets_batch
else:
            output = model(data)  # 'net' was undefined; the model is named 'model' here
loss_reduction = F.nll_loss(output, targets)
optimizer.zero_grad()
loss_reduction.backward()
optimizer.step()
train_loss += loss_reduction.item()
num_backprop += args.batch_size
_, predicted = output.max(1)
total += targets.size(0)
correct += predicted.eq(targets).sum().item()
if batch_idx % args.log_interval == 0 and loss_reduction is not None:
print('train_debug,{},{},{:.6f},{:.6f},{},{:.6f}'.format(
epoch,
total_num_images_backpropped + num_backprop,
loss_reduction.item(),
train_loss / float(num_backprop),
time.time(),
100.*correct/total))
return num_backprop
def test(args, model, device, test_loader, epoch, total_num_images_backpropped):
model.eval()
test_loss = 0
correct = 0
total = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.max(1, keepdim=True)[1] # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
total += target.size(0)
test_loss /= len(test_loader.dataset)
print('test_debug,{},{},{:.6f},{:.6f},{}'.format(
epoch,
total_num_images_backpropped,
test_loss,
100.*correct/total,
time.time()))
def main():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=1, metavar='N',
help='input batch size for training (default: 1)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=500, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--decay', default=0, type=float, help='decay')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
    # argparse's type=bool treats any non-empty string as True, so use a flag instead
    parser.add_argument('--selective-backprop', action='store_true', default=False,
                        help='whether or not to use selective-backprop')
parser.add_argument('--top-k', type=int, default=8, metavar='N',
help='how many images to backprop per batch')
    parser.add_argument('--pool-size', type=int, default=16, metavar='N',
                        help='how many images to pool before choosing the top-k to backprop')
parser.add_argument('--pickle-dir', default="/tmp/",
help='directory for pickles')
parser.add_argument('--pickle-prefix', default="stats",
help='file prefix for pickles')
args = parser.parse_args()
use_cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
trainset = datasets.MNIST('../data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
trainset = [t + (i,) for i, t in enumerate(trainset)] # Add image index to train set
chunk_size = args.pool_size * 10
    partitions = [trainset[i:i + chunk_size] for i in range(0, len(trainset), chunk_size)]  # range, not py2-only xrange
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('../data', train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.test_batch_size, shuffle=True, **kwargs)
model = Net().to(device)
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.decay)
# Store frequency of each image getting backpropped
keys = range(len(trainset))
images_hist = dict(zip(keys, [0] * len(keys)))
batch_stats = []
# Make images hist pickle path
image_id_pickle_dir = os.path.join(args.pickle_dir, "image_id_hist")
if not os.path.exists(image_id_pickle_dir):
os.mkdir(image_id_pickle_dir)
image_id_pickle_file = os.path.join(image_id_pickle_dir,
"{}_images_hist.pickle".format(args.pickle_prefix))
# Make batch stats pickle path
batch_stats_pickle_dir = os.path.join(args.pickle_dir, "batch_stats")
if not os.path.exists(batch_stats_pickle_dir):
os.mkdir(batch_stats_pickle_dir)
batch_stats_pickle_file = os.path.join(batch_stats_pickle_dir,
"{}_batch_stats.pickle".format(args.pickle_prefix))
total_num_images_backpropped = 0
for epoch in range(1, args.epochs + 1):
for partition in partitions:
trainloader = torch.utils.data.DataLoader(partition, batch_size=args.batch_size, shuffle=True, num_workers=2)
test(args, model, device, test_loader, epoch, total_num_images_backpropped)
num_images_backpropped = train(args,
model,
device,
trainloader,
optimizer,
epoch,
total_num_images_backpropped,
images_hist,
batch_stats=batch_stats)
total_num_images_backpropped += num_images_backpropped
with open(image_id_pickle_file, "wb") as handle:
pickle.dump(images_hist, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(batch_stats_pickle_file, "wb") as handle:
print(batch_stats_pickle_file)
pickle.dump(batch_stats, handle, protocol=pickle.HIGHEST_PROTOCOL)
if __name__ == '__main__':
main()
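# Illustrative follow-up (not part of the original script): with the default
# --pickle-dir and --pickle-prefix, the histogram written above lands at
# /tmp/image_id_hist/stats_images_hist.pickle and can be reloaded for later
# analysis, e.g.:
#
#     with open('/tmp/image_id_hist/stats_images_hist.pickle', 'rb') as f:
#         images_hist = pickle.load(f)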
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from intersight.api_client import ApiClient
from intersight.exceptions import (ApiTypeError, ApiValueError)
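# Illustrative usage sketch (not generated code): construct a default client
# and the API object defined below, then call one of its methods. The `top`
# keyword mirrors the documented `$top` query option; every name here comes
# from this module.
def _example_firmware_api_usage():
    api = FirmwareApi(ApiClient())  # default client, as in FirmwareApi.__init__
    # Synchronous call; pass async_req=True to receive a request thread instead.
    return api.get_firmware_distributable_list(top=5)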
class FirmwareApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_firmware_distributable(self, firmware_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_distributable(firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_distributable_with_http_info(
firmware_distributable, **kwargs) # noqa: E501
def create_firmware_distributable_with_http_info(self,
firmware_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_distributable_with_http_info(firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to create. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_distributable' is set
        if self.api_client.client_side_validation and (
                'firmware_distributable' not in local_var_params or  # noqa: E501
                local_var_params['firmware_distributable'] is None):  # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `create_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_driver_distributable(self,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_driver_distributable(firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_driver_distributable_with_http_info(
firmware_driver_distributable, **kwargs) # noqa: E501
def create_firmware_driver_distributable_with_http_info(
self, firmware_driver_distributable, **kwargs): # noqa: E501
"""Create a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_driver_distributable_with_http_info(firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to create. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_driver_distributable' is set
        if self.api_client.client_side_validation and (
                'firmware_driver_distributable' not in local_var_params or  # noqa: E501
                local_var_params['firmware_driver_distributable'] is None):  # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `create_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_eula(self, firmware_eula, **kwargs): # noqa: E501
"""Create a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_eula(firmware_eula, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareEula firmware_eula: The 'firmware.Eula' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEula
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_eula_with_http_info(
firmware_eula, **kwargs) # noqa: E501
def create_firmware_eula_with_http_info(self, firmware_eula,
**kwargs): # noqa: E501
"""Create a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_eula_with_http_info(firmware_eula, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareEula firmware_eula: The 'firmware.Eula' resource to create. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEula, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_eula'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_eula" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_eula' is set
if self.api_client.client_side_validation and (
'firmware_eula' not in local_var_params or # noqa: E501
local_var_params['firmware_eula'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_eula` when calling `create_firmware_eula`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_eula' in local_var_params:
body_params = local_var_params['firmware_eula']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEula', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_server_configuration_utility_distributable(
self, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_server_configuration_utility_distributable(firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_server_configuration_utility_distributable_with_http_info(
firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
def create_firmware_server_configuration_utility_distributable_with_http_info(
self, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Create a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_server_configuration_utility_distributable_with_http_info(firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to create. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
        all_params = ['firmware_server_configuration_utility_distributable']  # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
        if self.api_client.client_side_validation and (
                'firmware_server_configuration_utility_distributable' not in local_var_params or  # noqa: E501
                local_var_params['firmware_server_configuration_utility_distributable'] is None):  # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `create_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
            response_type='FirmwareServerConfigurationUtilityDistributable',  # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_firmware_upgrade(self, firmware_upgrade,
**kwargs): # noqa: E501
"""Create a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_upgrade(firmware_upgrade, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareUpgrade firmware_upgrade: The 'firmware.Upgrade' resource to create. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgrade
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_firmware_upgrade_with_http_info(
firmware_upgrade, **kwargs) # noqa: E501
def create_firmware_upgrade_with_http_info(self, firmware_upgrade,
**kwargs): # noqa: E501
"""Create a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_firmware_upgrade_with_http_info(firmware_upgrade, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FirmwareUpgrade firmware_upgrade: The 'firmware.Upgrade' resource to create. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgrade, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['firmware_upgrade'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method create_firmware_upgrade" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'firmware_upgrade' is set
if self.api_client.client_side_validation and (
'firmware_upgrade' not in local_var_params or # noqa: E501
local_var_params['firmware_upgrade'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_upgrade` when calling `create_firmware_upgrade`"
) # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_upgrade' in local_var_params:
body_params = local_var_params['firmware_upgrade']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgrade', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_distributable(self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_distributable_with_http_info(
moid, **kwargs) # noqa: E501
def delete_firmware_distributable_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method delete_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_driver_distributable(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_driver_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_driver_distributable_with_http_info(
moid, **kwargs) # noqa: E501
def delete_firmware_driver_distributable_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_driver_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_server_configuration_utility_distributable(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_server_configuration_utility_distributable(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_server_configuration_utility_distributable_with_http_info(
moid, **kwargs) # noqa: E501
def delete_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_server_configuration_utility_distributable_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_firmware_upgrade(self, moid, **kwargs): # noqa: E501
"""Delete a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_upgrade(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_firmware_upgrade_with_http_info(
moid, **kwargs) # noqa: E501
def delete_firmware_upgrade_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Delete a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_firmware_upgrade_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method delete_firmware_upgrade" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `delete_firmware_upgrade`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades/{Moid}',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_distributable_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
def get_firmware_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_distributable_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_distributable_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_distributable_list_with_http_info(
**kwargs) # noqa: E501
def get_firmware_distributable_list_with_http_info(self,
**kwargs): # noqa: E501
"""Read a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: return response data only, without
                                       status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_distributable_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
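    # Illustrative (not generated): typical arguments for the list call above,
    # using the OData-style options documented in its docstring. The property
    # name 'Version' is an assumed field of the resource schema.
    #
    #     api.get_firmware_distributable_list(
    #         filter="Version eq '4.1(2a)'",
    #         orderby='Version desc',
    #         top=10)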
def get_firmware_driver_distributable_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_driver_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
def get_firmware_driver_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_driver_distributable_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_driver_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
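
    # Editor's note: a sketch of the two calling conventions defined above.
    # `api` is an assumed, already-constructed instance of this class, and
    # the Moid value is a placeholder.
    #
    #     # Plain wrapper: returns only the deserialized model.
    #     drv = api.get_firmware_driver_distributable_by_moid('<moid>')
    #
    #     # *_with_http_info: returns (data, status_code, headers), useful
    #     # when the caller also needs the status code or response headers.
    #     drv, status, headers = \
    #         api.get_firmware_driver_distributable_by_moid_with_http_info(
    #             '<moid>')
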
def get_firmware_driver_distributable_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_driver_distributable_list_with_http_info(
**kwargs) # noqa: E501

    def get_firmware_driver_distributable_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_driver_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_driver_distributable_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
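
    # Editor's note: sketch of the async_req=True path documented in the
    # docstrings above. The call returns a thread-like object whose .get()
    # blocks until the response is available; `api` is an assumed instance
    # of this class.
    #
    #     thread = api.get_firmware_driver_distributable_list(
    #         top=5, async_req=True)
    #     # ... do other work while the request is in flight ...
    #     result = thread.get()  # blocks until the response is ready
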
def get_firmware_eula_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEula
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_eula_by_moid_with_http_info(
moid, **kwargs) # noqa: E501

    def get_firmware_eula_by_moid_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEula, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_eula_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_eula_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEula', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
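
    # Editor's note: sketch of _preload_content=False, per the docstring
    # above. With preloading disabled the raw urllib3.HTTPResponse object is
    # returned and the caller is responsible for reading and decoding the
    # body. `api` is an assumed instance of this class.
    #
    #     raw = api.get_firmware_eula_by_moid(moid, _preload_content=False)
    #     body_bytes = raw.data  # undecoded response body
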
def get_firmware_eula_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareEulaList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
        return self.get_firmware_eula_list_with_http_info(
            **kwargs)  # noqa: E501

    def get_firmware_eula_list_with_http_info(self, **kwargs):  # noqa: E501
"""Read a 'firmware.Eula' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_eula_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareEulaList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_eula_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Eulas',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareEulaList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
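
    # Editor's note: sketch of the counting options above. `$count` asks the
    # service to return only the number of matching resources; `$inlinecount`
    # embeds the count alongside the returned page. The 'allpages' literal
    # follows OData convention and is an assumption here; `api` is an assumed
    # instance of this class.
    #
    #     total = api.get_firmware_eula_list(count=True)
    #     page = api.get_firmware_eula_list(top=10, inlinecount='allpages')
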
def get_firmware_running_firmware_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_running_firmware_by_moid_with_http_info(
moid, **kwargs) # noqa: E501

    def get_firmware_running_firmware_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_running_firmware_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_running_firmware_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
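
    # Editor's note: sketch of the two _request_timeout forms described in
    # the docstrings above: a single number is a total request timeout, and
    # a 2-tuple is a (connection, read) split. `api` is an assumed instance
    # of this class.
    #
    #     fw = api.get_firmware_running_firmware_by_moid(
    #         moid, _request_timeout=30)          # 30 s total
    #     fw = api.get_firmware_running_firmware_by_moid(
    #         moid, _request_timeout=(3.05, 27))  # connect / read split
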
def get_firmware_running_firmware_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmwareList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_running_firmware_list_with_http_info(
**kwargs) # noqa: E501

    def get_firmware_running_firmware_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_running_firmware_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmwareList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_running_firmware_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmwareList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
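
    # Editor's note: sketch of the $apply transformation described in the
    # list docstrings above, grouping running firmware by version and
    # counting each group. The exact aggregation grammar is service-defined,
    # so treat this expression as illustrative; `api` is an assumed instance
    # of this class.
    #
    #     grouped = api.get_firmware_running_firmware_list(
    #         apply="groupby((Version), aggregate($count as Count))")
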
def get_firmware_server_configuration_utility_distributable_by_moid(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(
moid, **kwargs) # noqa: E501

    def get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_server_configuration_utility_distributable_by_moid"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_server_configuration_utility_distributable_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
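
    # Editor's note: sketch of the client-side validation errors raised in
    # the methods above. ApiValueError signals a missing required parameter
    # (e.g. moid is None); ApiTypeError signals an unexpected keyword
    # argument. `api` is an assumed instance of this class.
    #
    #     try:
    #         api.get_firmware_server_configuration_utility_distributable_by_moid(
    #             None)
    #     except ApiValueError:
    #         pass  # required `moid` was missing
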
def get_firmware_server_configuration_utility_distributable_list(
self, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributableList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_server_configuration_utility_distributable_list_with_http_info(
**kwargs) # noqa: E501

    def get_firmware_server_configuration_utility_distributable_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_server_configuration_utility_distributable_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to "$filter", but "at" is specifically used to filter versioning information properties for resources to return. A URI with an "at" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributableList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_server_configuration_utility_distributable_list"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributableList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
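    # Usage sketch (illustrative comment, not generated code): the query
    # options below assume an `api` instance of this class; the property name
    # "Version" inside the $filter expression is hypothetical.
    #
    #     scu_list = api.get_firmware_server_configuration_utility_distributable_list(
    #         filter="Version eq '4.1(2a)'",
    #         top=5)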
def get_firmware_upgrade_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_by_moid(moid, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgrade
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
def get_firmware_upgrade_by_moid_with_http_info(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgrade, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_by_moid" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_upgrade_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgrade', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
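    # Usage sketch (illustrative comment, not generated code): fetching a
    # single upgrade synchronously by Moid; '<moid>' is a placeholder for a
    # real identifier.
    #
    #     upgrade = api.get_firmware_upgrade_by_moid('<moid>')
    #     print(upgrade)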
def get_firmware_upgrade_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_list(async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The "$apply" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are "aggregate" and "groupby". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to "$filter", but "at" is specifically used to filter versioning information properties for resources to return. A URI with an "at" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_list_with_http_info(
**kwargs) # noqa: E501
def get_firmware_upgrade_list_with_http_info(self, **kwargs): # noqa: E501
"""Read a 'firmware.Upgrade' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_list_with_http_info(async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The "$apply" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are "aggregate" and "groupby". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to "$filter", but "at" is specifically used to filter versioning information properties for resources to return. A URI with an "at" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_list" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Upgrades',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
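    # Usage sketch for the asynchronous call path described in the docstrings
    # above (assuming an `api` instance of this class): with async_req=True
    # the method returns immediately with a thread-like object, and .get()
    # blocks until the FirmwareUpgradeList result is available.
    #
    #     thread = api.get_firmware_upgrade_list(async_req=True, top=10)
    #     upgrade_list = thread.get()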
def get_firmware_upgrade_status_by_moid(self, moid,
**kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_by_moid(moid, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_status_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
def get_firmware_upgrade_status_by_moid_with_http_info(
self, moid, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_by_moid_with_http_info(moid, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeStatus, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_status_by_moid" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `get_firmware_upgrade_status_by_moid`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/UpgradeStatuses/{Moid}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeStatus', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_firmware_upgrade_status_list(self, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_list(async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The "$apply" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are "aggregate" and "groupby". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to "$filter", but "at" is specifically used to filter versioning information properties for resources to return. A URI with an "at" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareUpgradeStatusList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_firmware_upgrade_status_list_with_http_info(
**kwargs) # noqa: E501
def get_firmware_upgrade_status_list_with_http_info(
self, **kwargs): # noqa: E501
"""Read a 'firmware.UpgradeStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_firmware_upgrade_status_list_with_http_info(async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The "$apply" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are "aggregate" and "groupby". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to "$filter", but "at" is specifically used to filter versioning information properties for resources to return. A URI with an "at" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareUpgradeStatusList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
'count', 'inlinecount', 'at'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_firmware_upgrade_status_list" % key)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params[
'filter'] is not None: # noqa: E501
query_params.append(
('$filter', local_var_params['filter'])) # noqa: E501
if 'orderby' in local_var_params and local_var_params[
'orderby'] is not None: # noqa: E501
query_params.append(
('$orderby', local_var_params['orderby'])) # noqa: E501
if 'top' in local_var_params and local_var_params[
'top'] is not None: # noqa: E501
query_params.append(
('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params[
'skip'] is not None: # noqa: E501
query_params.append(
('$skip', local_var_params['skip'])) # noqa: E501
if 'select' in local_var_params and local_var_params[
'select'] is not None: # noqa: E501
query_params.append(
('$select', local_var_params['select'])) # noqa: E501
if 'expand' in local_var_params and local_var_params[
'expand'] is not None: # noqa: E501
query_params.append(
('$expand', local_var_params['expand'])) # noqa: E501
if 'apply' in local_var_params and local_var_params[
'apply'] is not None: # noqa: E501
query_params.append(
('$apply', local_var_params['apply'])) # noqa: E501
if 'count' in local_var_params and local_var_params[
'count'] is not None: # noqa: E501
query_params.append(
('$count', local_var_params['count'])) # noqa: E501
if 'inlinecount' in local_var_params and local_var_params[
'inlinecount'] is not None: # noqa: E501
query_params.append(
('$inlinecount',
local_var_params['inlinecount'])) # noqa: E501
if 'at' in local_var_params and local_var_params[
'at'] is not None: # noqa: E501
query_params.append(('at', local_var_params['at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept([
'application/json', 'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
]) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/UpgradeStatuses',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareUpgradeStatusList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
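    # Usage sketch for the "$apply" aggregation documented above; the grouping
    # property "Overallstatus" is hypothetical, and the expression follows the
    # OData-style groupby/aggregate syntax the docstring describes.
    #
    #     grouped = api.get_firmware_upgrade_status_list(
    #         apply="groupby((Overallstatus), aggregate($count as Count))")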
def patch_firmware_distributable(self, moid, firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_distributable(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_distributable_with_http_info(
moid, firmware_distributable, **kwargs) # noqa: E501
def patch_firmware_distributable_with_http_info(self, moid,
firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_distributable_with_http_info(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method patch_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_distributable' is set
if self.api_client.client_side_validation and (
'firmware_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `patch_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
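    # Usage sketch for a PATCH update (assuming FirmwareDistributable is the
    # generated model class in scope, and that it accepts the hypothetical
    # 'description' keyword shown here; '<moid>' is a placeholder):
    #
    #     body = FirmwareDistributable(description='patched via SDK')
    #     patched = api.patch_firmware_distributable('<moid>', body)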
def patch_firmware_driver_distributable(self, moid,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_driver_distributable(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_driver_distributable_with_http_info(
moid, firmware_driver_distributable, **kwargs) # noqa: E501
def patch_firmware_driver_distributable_with_http_info(
self, moid, firmware_driver_distributable, **kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_driver_distributable_with_http_info(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_driver_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_driver_distributable' is set
if self.api_client.client_side_validation and (
'firmware_driver_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_driver_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `patch_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_firmware_running_firmware(self, moid, firmware_running_firmware,
**kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_running_firmware(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_running_firmware_with_http_info(
moid, firmware_running_firmware, **kwargs) # noqa: E501
def patch_firmware_running_firmware_with_http_info(
self, moid, firmware_running_firmware, **kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_running_firmware_with_http_info(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_running_firmware'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_running_firmware" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_running_firmware`"
) # noqa: E501
# verify the required parameter 'firmware_running_firmware' is set
if self.api_client.client_side_validation and (
'firmware_running_firmware' not in local_var_params
or # noqa: E501
local_var_params['firmware_running_firmware'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_running_firmware` when calling `patch_firmware_running_firmware`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_running_firmware' in local_var_params:
body_params = local_var_params['firmware_running_firmware']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
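    # Usage sketch: every method here accepts _preload_content=False to get
    # the raw urllib3.HTTPResponse back instead of a deserialized model
    # (assuming `body` is a FirmwareRunningFirmware instance and '<moid>' is
    # a placeholder):
    #
    #     raw = api.patch_firmware_running_firmware(
    #         '<moid>', body, _preload_content=False)
    #     print(raw.status)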
def patch_firmware_server_configuration_utility_distributable(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_server_configuration_utility_distributable(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_firmware_server_configuration_utility_distributable_with_http_info(
moid, firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
def patch_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_firmware_server_configuration_utility_distributable_with_http_info(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
        :param bool async_req: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'moid', 'firmware_server_configuration_utility_distributable'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `patch_firmware_server_configuration_utility_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
if self.api_client.client_side_validation and (
'firmware_server_configuration_utility_distributable' not in
local_var_params or # noqa: E501
local_var_params[
'firmware_server_configuration_utility_distributable'] is
None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `patch_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_distributable(self, moid, firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_distributable(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_distributable_with_http_info(
moid, firmware_distributable, **kwargs) # noqa: E501
def update_firmware_distributable_with_http_info(self, moid,
firmware_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.Distributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_distributable_with_http_info(moid, firmware_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDistributable firmware_distributable: The 'firmware.Distributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'"
" to method update_firmware_distributable" %
key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_distributable' is set
if self.api_client.client_side_validation and (
'firmware_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_distributable` when calling `update_firmware_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_distributable' in local_var_params:
body_params = local_var_params['firmware_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/Distributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_driver_distributable(self, moid,
firmware_driver_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_driver_distributable(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareDriverDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_driver_distributable_with_http_info(
moid, firmware_driver_distributable, **kwargs) # noqa: E501
def update_firmware_driver_distributable_with_http_info(
self, moid, firmware_driver_distributable, **kwargs): # noqa: E501
"""Update a 'firmware.DriverDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_driver_distributable_with_http_info(moid, firmware_driver_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareDriverDistributable firmware_driver_distributable: The 'firmware.DriverDistributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareDriverDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_driver_distributable'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_driver_distributable" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_driver_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_driver_distributable' is set
if self.api_client.client_side_validation and (
'firmware_driver_distributable' not in local_var_params
or # noqa: E501
local_var_params['firmware_driver_distributable'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_driver_distributable` when calling `update_firmware_driver_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_driver_distributable' in local_var_params:
body_params = local_var_params['firmware_driver_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/DriverDistributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareDriverDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_running_firmware(self, moid, firmware_running_firmware,
**kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_running_firmware(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareRunningFirmware
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_running_firmware_with_http_info(
moid, firmware_running_firmware, **kwargs) # noqa: E501
def update_firmware_running_firmware_with_http_info(
self, moid, firmware_running_firmware, **kwargs): # noqa: E501
"""Update a 'firmware.RunningFirmware' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_running_firmware_with_http_info(moid, firmware_running_firmware, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareRunningFirmware firmware_running_firmware: The 'firmware.RunningFirmware' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareRunningFirmware, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['moid', 'firmware_running_firmware'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_running_firmware" % key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_running_firmware`"
) # noqa: E501
# verify the required parameter 'firmware_running_firmware' is set
if self.api_client.client_side_validation and (
'firmware_running_firmware' not in local_var_params
or # noqa: E501
local_var_params['firmware_running_firmware'] is None
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_running_firmware` when calling `update_firmware_running_firmware`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_running_firmware' in local_var_params:
body_params = local_var_params['firmware_running_firmware']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/RunningFirmwares/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FirmwareRunningFirmware', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware_server_configuration_utility_distributable(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_server_configuration_utility_distributable(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FirmwareServerConfigurationUtilityDistributable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_firmware_server_configuration_utility_distributable_with_http_info(
moid, firmware_server_configuration_utility_distributable,
**kwargs) # noqa: E501
def update_firmware_server_configuration_utility_distributable_with_http_info(
self, moid, firmware_server_configuration_utility_distributable,
**kwargs): # noqa: E501
"""Update a 'firmware.ServerConfigurationUtilityDistributable' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_server_configuration_utility_distributable_with_http_info(moid, firmware_server_configuration_utility_distributable, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param FirmwareServerConfigurationUtilityDistributable firmware_server_configuration_utility_distributable: The 'firmware.ServerConfigurationUtilityDistributable' resource to update. (required)
        :param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FirmwareServerConfigurationUtilityDistributable, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'moid', 'firmware_server_configuration_utility_distributable'
] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware_server_configuration_utility_distributable"
% key)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'moid' is set
if self.api_client.client_side_validation and (
'moid' not in local_var_params or # noqa: E501
local_var_params['moid'] is None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `moid` when calling `update_firmware_server_configuration_utility_distributable`"
) # noqa: E501
# verify the required parameter 'firmware_server_configuration_utility_distributable' is set
if self.api_client.client_side_validation and (
'firmware_server_configuration_utility_distributable' not in
local_var_params or # noqa: E501
local_var_params[
'firmware_server_configuration_utility_distributable'] is
None): # noqa: E501
raise ApiValueError(
"Missing the required parameter `firmware_server_configuration_utility_distributable` when calling `update_firmware_server_configuration_utility_distributable`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'moid' in local_var_params:
path_params['Moid'] = local_var_params['moid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'firmware_server_configuration_utility_distributable' in local_var_params:
body_params = local_var_params[
'firmware_server_configuration_utility_distributable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params[
'Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json',
'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['cookieAuth', 'oAuth2'] # noqa: E501
return self.api_client.call_api(
'/firmware/ServerConfigurationUtilityDistributables/{Moid}',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=
'FirmwareServerConfigurationUtilityDistributable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get(
'_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2020 all rights reserved
#
"""
This package provides the implementation of a simple evaluation network.
There are three fundamental abstractions: variables, operators, and literals. Variables hold
the values computed by the evaluation network, operators compute their values by acting on the
values of other nodes, and literals encapsulate foreign objects, such as numeric constants.
These abstractions provide the machinery for representing arbitrary expressions as graphs.
The interesting aspect of this package is that nodal values get updated automatically when the
values of any of the nodes in their domain change. Nodes keep track of the set of dependents
that are interested in their values and post notifications when their values change.
In addition, this package provides {SymbolTable}, a simple manager for evaluation nodes. Beyond
node storage, {SymbolTable} enables the naming of nodes and can act as the name resolution
context for {Expression} nodes, which evaluate strings with arbitrary python expressions that
may involve the values of other nodes in the model. The other nodes provided here operate
independently of {SymbolTable}. However, it is a good idea to build some kind of container to
hold nodes while the evaluation graph is in use.
Simple examples of the use of the ideas in this package are provided in the unit tests. For a
somewhat more advanced example, take a look at {pyre.config.Configurator}, which is a
{Hierarchical} model that builds an evaluation network out of the traits of pyre components, so
that trait settings can refer to the values of other traits in the configuration files.
"""
# the node generator
from .Calculator import Calculator as calculator
# implementation note: these factories are functions (rather than a raw import of the
# corresponding constructor) in order to prevent the secondary {import} from happening when the
# package itself is first imported. this enables the package to override compile time settings
# and makes it possible to implement the {debug} capability
# factories
# model
def model(**kwds):
"""
Build a node container that specializes in names that have encoded hierarchical levels,
such as file paths or namespaces
"""
from .Hierarchical import Hierarchical
return Hierarchical(**kwds)
# nodes
def var(value=None, **kwds):
"""
Build a variable, i.e. a node that can hold an arbitrary value
"""
# get the base node
from .Node import Node
# build a variable and return it
return Node.variable(value=value, **kwds)
def expression(*, formula, model):
"""
Build a new node that evaluates a {formula} that involves the names of other nodes as
resolved in the symbol table {model}.
"""
# build the node and return it
return model.expression(value=formula)
def sequence(*operands):
"""
Build a node that holds a sequence of other nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.sequence(operands=operands)
def mapping(**operands):
"""
    Build a node that holds a mapping of other nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.mapping(operands=operands)
def average(*operands):
"""
Compute the average of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.average(operands=operands)
def count(*operands):
"""
Compute the length of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.count(operands=operands)
def max(*operands):
"""
    Compute the maximum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.max(operands=operands)
def min(*operands):
"""
Compute the minimum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.min(operands=operands)
def product(*operands):
"""
    Compute the product of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.product(operands=operands)
def sum(*operands):
"""
Compute the sum of a collection of nodes
"""
# access the constructor
from .Node import Node
# build the node and return it
return Node.sum(operands=list(operands))
def debug():
"""
Support for debugging the calc package
"""
# print(" ++ debugging 'pyre.calc'")
# attach {ExtentAware} as the metaclass of {Node} so we can verify that all instances of
# this class are properly garbage collected
from ..patterns.ExtentAware import ExtentAware
# get the normal metaclass
global calculator
# derive a new one
class counted(calculator, ExtentAware): pass
# and set it as the default
calculator = counted
# all done
return
# end of file
#!/usr/bin/env python3
# coding: utf-8
import os
import glob
import sfml as sf
class Animation:
"""
An animated texture.
"""
def __init__(self, frames, interval=0):
"""
:param frames: Iterable of sf.Texture objects
:param interval: Time between two frames (default: 0.0s)
"""
self.frames = frames
self.interval = interval
self.index = 0
self.time = 0
@classmethod
def load_from_dir(cls, path, interval=None):
"""
Load an animation from a directory. Directory must contain some image
files named by their index (e.g. "1.png", "2.png", etc...)
:param path: str object, path to the directory to load
:param interval: Time between two frames
:return: Animation
"""
if path[-1] not in (os.sep, '/'):
path += os.sep
frames = list()
for frame_path in glob.iglob(path + '[0-9].png'):
frame = sf.Texture.from_file(frame_path)
frames.append(frame)
if interval is None:
return cls(frames)
else:
return cls(frames, interval)
def get_frame(self, dt):
"""
        Returns the texture for the current frame of the animation.
:param dt: The time between the current and the previous frame.
:return: A sf.Texture instance
"""
self.time += dt
if self.time > self.interval:
self.time = 0
self.index += 1
self.index %= len(self.frames)
return self.frames[self.index]
def reset(self):
self.time = 0
self.index = 0
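# A rough usage sketch (the directory name, interval and per-frame delta
# below are illustrative):
#
#   anim = Animation.load_from_dir("sprites/walk/", interval=0.1)
#   ...
#   texture = anim.get_frame(dt)  # call once per rendered frame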
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from utils import get_args, get, print_args, get_seed, extract_seed_from_ckpt
from logger import set_logger
from vilmedic.executors import Trainor, Validator
def main():
# Get args and create seed
config, override = get_args()
seed = get_seed()
# Create checkpoint dir
config.ckpt_dir = os.path.join(config.ckpt_dir, config.name)
os.makedirs(config.ckpt_dir, exist_ok=True)
# If ckpt is specified, we continue training. Lets extract seed
if config.ckpt is not None:
config.ckpt = os.path.join(config.ckpt_dir, config.ckpt)
seed = extract_seed_from_ckpt(config.ckpt)
# Create logger according to seed
set_logger(config.ckpt_dir, seed)
# Nice print args
print_args(config, ['trainor', 'validator'], seed, override)
# Fetch args for training and validation
train_config = get(config, 'trainor')
val_config = get(config, 'validator')
# Trainor
trainor = Trainor(config=train_config, # train_config is all args but the other executors args
seed=seed)
# Evaluator
evaluator = Validator(config=val_config,
models=[trainor.model],
seed=seed,
from_training=True)
# Lets be gentle, give evaluator to trainor
trainor.evaluator = evaluator
# Boom
trainor.start()
if __name__ == "__main__":
main()
import h5py
import os
from ._core import hfile
from .. import utils
from .. import h5tree
def test_h5tree(hfile):
assert hfile is not None
assert os.path.exists(hfile)
assert not utils.isHdf5FileObject(hfile)
str_list = [
b"Q=1",
b"Q=0.1",
b"Q=0.01",
b"Q=0.001",
b"Q=0.0001",
b"Q=0.00001",
]
with h5py.File(hfile, "w") as f:
assert not utils.isHdf5FileObject(hfile)
assert f is not None
assert utils.isHdf5FileObject(f)
f.create_dataset("str_list", data=str_list)
f.create_dataset("title", data=b"this is the title")
f.create_dataset("subtitle", data=[b"<a subtitle>"])
f.create_dataset("names", data=[b"one", b"two"])
assert os.path.exists(hfile)
mc = h5tree.Hdf5TreeView(hfile)
assert mc is not None
assert len(mc.report()) == 5
from uuid import uuid1
POSTGRES_MAX_TABLE_NAME_LEN_CHARS = 63
NULL_CHARACTER = "\\N"
def generate_table_name(source_table_name: str) -> str:
table_name_template = "loading_{source_table_name}_" + uuid1().hex
    # postgres has a max table name length of 63 characters, so it's possible
    # the staging table name could exceed the max table length. when this
    # happens, truncate the source table name and rely on the uuid portion to
    # keep the table name unique.
max_source_table_name_length = POSTGRES_MAX_TABLE_NAME_LEN_CHARS - len(
table_name_template.replace("{source_table_name}", "")
)
truncated_source_table_name = source_table_name[: max_source_table_name_length - 1]
return table_name_template.format(source_table_name=truncated_source_table_name)
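# Illustrative behaviour of the helper above: a short name is embedded whole,
# a long one is truncated so the result respects the Postgres limit:
#
#   generate_table_name("users")   # -> "loading_users_<32-char-hex-uuid>"
#   name = generate_table_name("x" * 100)
#   assert len(name) <= POSTGRES_MAX_TABLE_NAME_LEN_CHARS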
'''
black_scholes.py
Created on Oct 11, 2018
@author: William Quintano
'''
from scipy.stats import norm
import math
'''
Calculates the price of a stock option using the Black-Scholes model
:param s strike price
:param t remaining lifespan of option in years
:param u price of underlying stock (To get call buying or put selling price: u = highest bid for stock.
To get call selling or put buying price: u = asking price for stock.)
:param r risk-free rate. This should be the rate of a US treasury bill/bond with a duration close to t
:param v volatility
:param c option type. True for call, False for put
'''
def black_scholes(s, t, u, r, v, c):
    sign = 1 if c else -1
    # Standard Black-Scholes terms; d1 and d2 must be computed unsigned, and
    # for a put, sign = -1 turns the call formula into
    # s*exp(-r*t)*N(-d2) - u*N(-d1).
    d1 = (math.log(u / s) + (r + .5 * v ** 2) * t) / (v * t ** .5)
    d2 = d1 - v * t ** .5
    return sign * (u * norm.cdf(sign * d1) - s * norm.cdf(sign * d2) / math.exp(r * t))
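if __name__ == '__main__':
    # Quick sanity check with illustrative parameters (not from the original):
    # a one-year at-the-money option, 2% risk-free rate, 25% volatility.
    # Put-call parity should hold: call - put == u - s*exp(-r*t).
    call = black_scholes(s=100.0, t=1.0, u=100.0, r=0.02, v=0.25, c=True)
    put = black_scholes(s=100.0, t=1.0, u=100.0, r=0.02, v=0.25, c=False)
    print('call: %.4f  put: %.4f' % (call, put))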
#!/usr/bin/env python
from argparse import ArgumentParser
from distutils.util import get_platform
from setuptools import find_packages, setup
parser = ArgumentParser()
parser.add_argument("--plat-name", type=str, default=get_platform())
args, unknown_args = parser.parse_known_args()
if args.plat_name == "win32":
source_path = "src/main/win32"
elif args.plat_name == "win-amd64":
source_path = "src/main/win-amd64"
else:
raise OSError("mosi-cbc does not support '%s' platform" % args.plat_name)
long_description = "!!! pypandoc and/or pandoc not found, long_description is bad, don't upload this to PyPI !!!"
if any(arg in unknown_args for arg in ["sdist", "bdist_wheel"]):
try:
# noinspection PyUnresolvedReferences
from pypandoc import convert, download_pandoc
download_pandoc()
long_description = convert("README.md", "rst")
except (ImportError, OSError):
pass
setup(
name="mosi-cbc",
version="0.0.1",
description="CBC solver plugin for the mosi package.",
long_description=long_description,
url="https://github.com/alexbahnisch/mosi-cbc",
author="Alex Bahnisch",
author_email="[email protected]",
license="MIT",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5"
"Programming Language :: Python :: 3.6"
],
keywords="mosi cbc",
packages=find_packages(source_path),
package_dir={"": source_path},
package_data={"": ["cbc.exe"]},
install_requires=[
"mosi>=0.0.3"
],
setup_requires=[
"pypandoc>=1.4"
],
tests_require=[
"pytest>=3.2.3",
"pytest-runner>=2.12.1"
],
test_suite="src.tests"
)
# Generated by Django 3.2.3 on 2021-06-06 17:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('asset', '0001_initial'),
('category', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Investment',
fields=[
('investment_id', models.AutoField(primary_key=True, serialize=False)),
('quantity', models.DecimalField(decimal_places=10, default=1.0, max_digits=15)),
('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='asset', to='asset.asset')),
],
options={
'db_table': 'ewallet_investment',
},
),
migrations.CreateModel(
name='Wallet',
fields=[
('wallet_id', models.AutoField(primary_key=True, serialize=False)),
('name', models.TextField(max_length=255)),
('describe', models.TextField(blank=True, default=None, max_length=255, null=True)),
('balance', models.DecimalField(decimal_places=10, default=0.0, max_digits=15)),
('category_id', models.ForeignKey(db_column='category_id', on_delete=django.db.models.deletion.CASCADE, to='category.category')),
('investment', models.ManyToManyField(through='wallet.Investment', to='asset.Asset')),
],
options={
'db_table': 'ewallet_wallet',
},
),
migrations.AddField(
model_name='investment',
name='wallet',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='wallet', to='wallet.wallet'),
),
]
#!/usr/bin/env dls-python2.7
"""Write coordinated magnet moves to different outputs.
A PvWriter and a Simulation writer are available to take magnet_jogs.Moves
and apply them to their respective interfaces.
"""
from cothread.catools import caput
from controls import PvReferences, PvMonitors, Arrays
import magnet_jogs
class AbstractWriter(object):
"""
Abstract writer.
Takes coordinated magnet moves keys and writes the values to a location.
"""
def __init__(self):
self.magnet_coordinator = magnet_jogs.MagnetCoordinator()
def write(self, move, factor):
"""
Apply the requested move.
Args:
move (magnet_jogs.Move): which move to perform.
factor (float): scale factor to apply to move.
"""
raise NotImplementedError()
class PvWriter(AbstractWriter):
"""Write coordinated magnets moves to PV's on the machine."""
def __init__(self):
AbstractWriter.__init__(self)
self.scale_pvs = [ctrl + ':WFSCA' for ctrl in PvReferences.CTRLS]
self.set_scale_pvs = [name + ':SETWFSCA' for name in PvReferences.NAMES]
self.offset_pvs = [ctrl + ':OFFSET' for ctrl in PvReferences.CTRLS]
def write(self, move, factor):
        if move == magnet_jogs.Moves.SCALE:  # move is a magnet_jogs.Moves value, as in SimWriter.write
scale_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_scales(), move, factor)
set_scale_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_set_scales(), move, factor)
self.write_to_pvs(self.scale_pvs, scale_jog_values)
self.write_to_pvs(self.set_scale_pvs, set_scale_jog_values)
else:
offset_jog_values = self.magnet_coordinator.jog(
PvMonitors.get_instance().get_offsets(), move, factor)
self.write_to_pvs(self.offset_pvs, offset_jog_values)
def write_to_pvs(self, pvs, jog_values):
caput(pvs, jog_values)
class SimWriter(AbstractWriter):
"""Write coordinated magnets moves to the manual simulation controller."""
def __init__(self, controller):
"""
Class initialised with instance of the simulation controller.
Args:
controller (straight.SimModeController): write to the controller's
                stored scales and offsets
"""
AbstractWriter.__init__(self)
self.controller = controller
def write(self, move, factor):
if move == magnet_jogs.Moves.SCALE:
jog_values = self.magnet_coordinator.jog(
self.controller.scales, move, factor)
else:
jog_values = self.magnet_coordinator.jog(
self.controller.offsets, move, factor)
self.check_bounds(move, jog_values)
self.update_sim_values(move, jog_values)
def update_sim_values(self, key, jog_values):
"""Pass jog values to the controller."""
if key == magnet_jogs.Moves.SCALE:
self.controller.update_sim(Arrays.SCALES, jog_values)
else:
self.controller.update_sim(Arrays.OFFSETS, jog_values)
def reset(self):
"""Reset simulation with the PVs to reflect the real chicane."""
simulated_scales = PvMonitors.get_instance().get_scales()
self.controller.update_sim(Arrays.SCALES, simulated_scales)
simulated_offsets = PvMonitors.get_instance().get_offsets()
self.controller.update_sim(Arrays.OFFSETS, simulated_offsets)
def check_bounds(self, key, jog_values):
"""Raise exception if new value exceeds magnet current limit."""
pvm = PvMonitors.get_instance()
scales = self.controller.scales
offsets = self.controller.offsets
imaxs = pvm.get_max_currents()
imins = pvm.get_min_currents()
# Check errors on limits.
for idx, (max_val, min_val, offset, scale, new_val) in enumerate(
zip(imaxs, imins, offsets, scales, jog_values)):
if key == magnet_jogs.Moves.SCALE:
high = offset + new_val
low = offset - new_val
else:
high = new_val + scale
low = new_val - scale
if high > max_val or low < min_val:
raise magnet_jogs.OverCurrentException(idx)
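# A rough usage sketch (assumes a straight.SimModeController instance is
# available; the move and factor values are illustrative):
#
#   writer = SimWriter(controller)
#   writer.write(magnet_jogs.Moves.SCALE, 0.5)
#   writer.reset()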
"""Test BrownianExcursion."""
import pytest
from stochastic.processes.continuous import BrownianExcursion
def test_brownian_excursion_str_repr(t):
instance = BrownianExcursion(t)
assert isinstance(repr(instance), str)
assert isinstance(str(instance), str)
def test_brownian_excursion_sample(t, n, threshold):
instance = BrownianExcursion(t)
s = instance.sample(n)
assert len(s) == n + 1
assert (s >= 0).all()
assert s[0] == pytest.approx(0, threshold)
assert s[-1] == pytest.approx(0, threshold)
def test_brownian_excursion_sample_at(t, times, threshold):
instance = BrownianExcursion(t)
s = instance.sample_at(times)
assert len(s) == len(times)
assert (s >= 0).all()
if times[0] == 0:
assert s[0] == pytest.approx(0, threshold)
assert s[-1] == pytest.approx(0, threshold)
import random
class RandomFlip(object):
"""Flips node positions along a given axis randomly with a given
probability.
Args:
axis (int): The axis along the position of nodes being flipped.
p (float, optional): Probability that node positions will be flipped.
(default: :obj:`0.5`)
.. testsetup::
import torch
from torch_geometric.data import Data
.. testcode::
from torch_geometric.transforms import RandomFlip
pos = torch.tensor([[-1, 1], [-3, 0], [2, -1]], dtype=torch.float)
data = Data(pos=pos)
data = RandomFlip(axis=0, p=1)(data)
print(data.pos)
.. testoutput::
tensor([[ 1., 1.],
[ 3., 0.],
[-2., -1.]])
"""
def __init__(self, axis, p=0.5):
self.axis = axis
self.p = p
def __call__(self, data):
if random.random() < self.p:
data.pos[:, self.axis] = -data.pos[:, self.axis]
return data
def __repr__(self):
return '{}(axis={}, p={})'.format(self.__class__.__name__, self.axis,
self.p)
def is_lock_ness_monster(s):
return any(phrase in s for phrase in ["tree fiddy", "three fifty", "3.50"])
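# e.g. is_lock_ness_monster("I need about tree fiddy") -> True
#      is_lock_ness_monster("totally unrelated")       -> False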
import json
import pandas as pd
import time
#################################
#
#with open('logs.json', 'r') as data:
# data = data.read()
#
#logs = json.loads(data)
#
########################
def get_data(file):
    with open(file, 'r') as data:
        data = data.read()
    logs = json.loads(data)
    #s = Sender('Test', '192.168.1.214')
    #logs = s.list_logs()
    df = pd.DataFrame(columns=['acquired_time'])
    for entry in logs:
        for x in entry:
            if x == "create_params_file:output":
                # The acquired time sits in the 7th field of the entry,
                # inside a quoted, comma-separated string.
                stats = entry[6].split(',')
                acquired_time = stats[3].split('"')[3]
                print(acquired_time)
                df_temp = pd.DataFrame({'acquired_time': [acquired_time]})
                df = pd.concat([df, df_temp])
    df = df.reset_index(drop=True)
    return df
def write_logs_to_txt(logs):
    # Takes the parsed logs as a parameter instead of relying on a global
    # that is only created in the commented-out code above.
    with open("logs.txt", "a+") as f:
        for entry in logs:
            for x in entry:
                f.write(str(x))
                f.write('\r\n')
            f.write('#################################################### \r\n')
#df = get_data('logs.json')
#
#print(df)
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
return fibonacci(n - 1) + fibonacci(n - 2)
def memoize(func):
cache = dict()
def memoized_func(*args):
if args in cache:
return cache[args]
result = func(*args)
cache[args] = result
return result
return memoized_func
memoized_fibonacci = memoize(fibonacci)
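# Note: fibonacci() recurses through the module-level name, so only the
# top-level calls below hit this cache; rebinding the original name instead
# (fibonacci = memoize(fibonacci)) would memoize the recursive calls too.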
x = memoized_fibonacci(10)
#x = fibonacci(35)
x = memoized_fibonacci(12)
#x = fibonacci(35)
print(x)
# coding: utf-8
import datetime
import random
from http import HTTPStatus
from unittest.mock import Mock
from django.test.client import RequestFactory
import pytest
from src.infrastructure.api.views.exchange_rate import (
CurrencyViewSet, CurrencyExchangeRateViewSet)
from tests.fixtures import currency, exchange_rate
@pytest.mark.unit
def test_currency_viewset_get(currency):
viewset = CurrencyViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.get.return_value = (
vars(currency),
HTTPStatus.OK.value
)
response = viewset.get(RequestFactory(), currency.code)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
@pytest.mark.unit
def test_currency_viewset_list(currency):
viewset = CurrencyViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.list.return_value = (
[vars(currency) for _ in range(random.randint(1, 10))],
HTTPStatus.OK.value
)
response = viewset.list(RequestFactory(), currency.code)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, list)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_convert(exchange_rate):
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.convert.return_value = (
{
'exchanged_currency': exchange_rate.exchanged_currency,
'exchanged_amount': round(random.uniform(10, 100), 2),
'rate_value': round(random.uniform(0.5, 1.5), 6)
},
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'exchanged_currency': exchange_rate.exchanged_currency,
'amount': round(random.uniform(10, 100), 2)
}
response = viewset.convert(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_list(exchange_rate):
series_length = random.randint(1, 10)
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.list.return_value = (
[exchange_rate for _ in range(series_length)],
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'date_from': (
datetime.date.today() + datetime.timedelta(days=-series_length)
).strftime('%Y-%m-%d'),
'date_to': datetime.date.today().strftime('%Y-%m-%d'),
}
response = viewset.list(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, list)
@pytest.mark.unit
def test_currency_exchange_rate_viewset_calculate_twr(exchange_rate):
viewset = CurrencyExchangeRateViewSet()
viewset.viewset_factory = Mock()
viewset.viewset_factory.create.return_value = Mock()
viewset.viewset_factory.create.return_value.calculate_twr.return_value = (
{'time_weighted_rate': round(random.uniform(0.5, 1.5), 6)},
HTTPStatus.OK.value
)
request = RequestFactory()
request.query_params = {
'source_currency': exchange_rate.source_currency,
'exchanged_currency': exchange_rate.exchanged_currency,
'date_from': (
datetime.date.today() + datetime.timedelta(days=-5)
).strftime('%Y-%m-%d'),
'date_to': datetime.date.today().strftime('%Y-%m-%d'),
}
response = viewset.calculate_twr(request)
assert hasattr(response, 'status_code')
assert response.status_code == HTTPStatus.OK.value
assert hasattr(response, 'data')
assert isinstance(response.data, dict)
import mysql.connector
# Multicraft Cred
mydb = mysql.connector.connect(
host="",
user="",
password="",
database=""
)
mycursor = mydb.cursor()
# WHMCS Cred
mydb_whmcs = mysql.connector.connect(
host="",
user="",
password="",
database=""
)
mycursor_whmcs = mydb_whmcs.cursor()
import argparse
import sys
import analyse
import calibration
from logginghelpers import configure_logging
ALL_CMD = 'all'
MUNICIPALITY_CMD = 'municipality'
def parse_settings(settings) -> dict:
result = {}
if settings is not None:
for setting in settings:
k, v = setting.split('=')
result[k] = v
return result
def schedule_calibration_run(args):
settings = parse_settings(args.override_settings)
tags = args.tags
if tags is None:
tags = []
if args.calibrateSubCommand == ALL_CMD:
calibration.schedule_for_all_municipalities(args.settings_key, settings, args.force_all, args.limit, tags)
elif args.calibrateSubCommand == MUNICIPALITY_CMD:
calibration.schedule_for_single_municipality(args.municipality_id, args.settings_key, settings, tags)
def analyse_data(args):
tags = args.tags
if tags is None:
tags = []
if args.analyseSubCommand == ALL_CMD:
analyse.analyse_all_calibration_runs(args.output_file, args.limit, tags)
def add_settings_override_parser(parser):
parser.add_argument('-o', '--override-settings', type=str, nargs='*', dest='override_settings', metavar='KEY=VALUE', help='override individual scrapy settings')
def add_tags_parser(parser, help_text):
parser.add_argument('-t', '--tags', type=str, nargs='*', dest='tags', metavar='TAG_VALUE', help=help_text)
def add_settings_key_parser(parser, default):
parser.add_argument('-s', '--settings-key', type=str, default=default, dest='settings_key', help='use settings stored in default_scrapy_settings with this key')
def add_calibration_parser(subparsers):
schedule_calibration_parser = subparsers.add_parser('calibrate', help='schedule calibration runs')
schedule_calibration_parser.set_defaults(func=schedule_calibration_run)
calibration_subparsers = schedule_calibration_parser.add_subparsers(
help='schedule calibration run for:', required=True, dest='calibrateSubCommand'
)
all_subparser = calibration_subparsers.add_parser(ALL_CMD,
help='all municipalities')
all_subparser.add_argument(
'-f',
'--force',
default=False,
action='store_true',
dest='force_all',
help='Schedule calibration run for all municipalities even if they are already calibrated',
)
all_subparser.add_argument(
'-l',
'--limit',
type=int,
dest='limit',
help='limit amount of runs to be scheduled',
)
add_settings_key_parser(all_subparser, 'CALIBRATE')
add_settings_override_parser(all_subparser)
add_tags_parser(all_subparser, 'tags to attach to the created queue entries')
municipality_subparser = calibration_subparsers.add_parser(MUNICIPALITY_CMD,
help='given municipality id')
municipality_subparser.add_argument('municipality_id', type=int, help='id of the municipality')
add_settings_key_parser(municipality_subparser, 'CALIBRATE')
add_settings_override_parser(municipality_subparser)
add_tags_parser(municipality_subparser, 'tags to attach to the created queue entry')
def add_analyse_parser(subparsers):
analyse_parser = subparsers.add_parser('analyse', help='analyse data')
analyse_subparsers = analyse_parser.add_subparsers(help='analyse what')
parser = analyse_subparsers.add_parser('calibration', help='analyse calibration runs')
parser.set_defaults(func=analyse_data)
calibration_subparser = parser.add_subparsers(required=True, dest='analyseSubCommand', help='analyse calibration runs')
all_subparser = calibration_subparser.add_parser(ALL_CMD, help='analyse all finished calibration runs')
all_subparser.add_argument('-o', '--output-file', type=str, help='output data to this csv file', required=True)
all_subparser.add_argument(
'-l',
'--limit',
type=int,
dest='limit',
help='limit amount of runs to be analysed',
)
add_tags_parser(all_subparser, 'only entries which contain all of the specified tags')
def parse_args(args):
parser = argparse.ArgumentParser('scheduler.py')
subparsers = parser.add_subparsers(help='action types')
add_calibration_parser(subparsers)
add_analyse_parser(subparsers)
arguments = parser.parse_args(args)
return arguments
if __name__ == '__main__':
configure_logging()
parsed_args = parse_args(sys.argv[1:])
if hasattr(parsed_args, 'func'):
parsed_args.func(parsed_args)
else:
parse_args(['-h'])
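# Example invocations built from the parsers above (values are illustrative):
#
#   python scheduler.py calibrate all --limit 10 -t nightly
#   python scheduler.py calibrate municipality 42 -o LOG_LEVEL=DEBUG
#   python scheduler.py analyse calibration all -o runs.csv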
import sys
import time
import pygame
from pygame.locals import *
from classes.disk import Disk
from classes.robot import Robot
from classes.exit import Exit
"""
This class is used for the simulation.
It loops 60 times a second so it can draw and update the simulation.
"""
class Window:
def __init__(self):
"""Initialize PyGame"""
pygame.init()
"""Set the window Size"""
self.width = 600
self.height = 600
"""Create the Screen"""
self.font = pygame.font.SysFont("monospace", 20)
self.screen = pygame.display.set_mode((self.width, self.height))
"""Initialize classes as None"""
self.disk = None
self.r1 = None
self.r2 = None
self.exit = None
self.clock = None
def new(self, radius, r1StartPos, r2StartPos, exitPos,
startPointOnEdge, r1TravelToEdge, r2TravelToEdge,
r1TravelOnCircleEdge, r2TravelOnCircleEdge):
"""Create our classes"""
self.disk = Disk(radius, (300,300))
self.r1 = Robot(self.disk, r1StartPos, exitPos, startPointOnEdge, r1TravelToEdge, r1TravelOnCircleEdge, False)
self.r2 = Robot(self.disk, r2StartPos, exitPos, startPointOnEdge, r2TravelToEdge, r2TravelOnCircleEdge, True)
self.exit = Exit(exitPos)
self.clock = pygame.time.Clock()
def _draw(self):
self.screen.fill(pygame.Color(255,255,255))
self.disk.draw(self.screen)
self.exit.draw(self.screen)
self.r1.draw(self.screen, self.font)
self.r2.draw(self.screen, self.font)
pygame.display.update()
def _update(self):
elapsedTime = self.clock.tick_busy_loop(60)/1000 #Seconds since last update
self.r1.update(elapsedTime)
self.r2.update(elapsedTime)
def MainLoop(self):
"""This is the Main Draw Loop"""
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if(None not in (self.r1, self.r2, self.disk, self.exit, self.clock)):
if(self.r1.getIsFinished() and self.r2.getIsFinished()):
time.sleep(2)
return
self._update()
self._draw()
from django.urls import path, include
from .views import *
urlpatterns = [
path('latest-products/', LatestProductsList.as_view(), name="latest-products"),
path('checkout/', checkout, name="checkout"),
path('orders/', OrdersList.as_view(), name="orders"),
path('products/search/', search, name="search"),
path('products/<slug:category_slug>/', CategoryDetail.as_view(), name="category-detail"),
path('products/<slug:category_slug>/<slug:product_slug>/', ProductDetail.as_view(), name="product-detail"),
]
from Cartas import Baraja, Carta
import os
Line = '---------------------------'
def clear():
if os.name == "nt":
os.system("cls")
else:
os.system("clear")
class Jugador:
def __init__(self, nombre: str, mazo: Baraja):
self.nombre = nombre
self.cardSum = 0
self.acabar = False
self.mazo = mazo
self.baraja = Baraja()
def hit(self):
carta = self.mazo.getRandom()
self.baraja + carta
self.cardSum = self.baraja.valor
if self.cardSum > 21:
self.acabar = True
    def Turnar(self, baraja: Baraja) -> Baraja:
        self.mazo = baraja
        self.cardSum = self.baraja.valor
        while 1:
            try:
                print('Use the following menu to choose the action you want to perform')
                print('1. Hit')
                print('2. Stand')
                respuesta = int(input())
                if respuesta not in (1, 2):
                    raise ValueError
                if respuesta == 1:
                    self.hit()
                    if self.acabar:  # busted, so the turn is over
                        break
                elif respuesta == 2:
                    self.stay()
                    break
            except (TypeError, ValueError):
                print('The value entered is not a valid number')
                clear()
        return self.mazo
def stay(self):
self.acabar = True
def mostrarCartas(self):
space = ' ' * (len(Line) - len(self.nombre) - 7)
print(f'{Line}--')
print(f'| [{self.baraja.valor}]\t{self.nombre}{space}|')
print(f'{Line}--')
print('|| ', end='')
for i in range(len(self.baraja)):
print(f'{str(self.baraja[i].valor.sign)}{self.baraja[i].simbolo}', end='')
if i < len(self.baraja) - 1:
print(' | ', end='')
print(f' ||')
print(f'{Line}--')
print()
def addCarta(self, carta: Carta) -> Baraja:
self.baraja + carta
for card in self.mazo.cartas:
if carta == card:
self.mazo.cartas.remove(card)
return self.mazo
class Crupier(Jugador):
def __init__(self, mazo: Baraja):
super().__init__('Crupier', mazo)
def hit(self):
carta = self.mazo.getRandom()
if carta.valor == 'A':
if carta.value + self.cardSum < 21:
self.cardSum = self.baraja.valor + 11
self.baraja + carta
else:
self.baraja + carta
self.cardSum = self.baraja.valor
else:
self.baraja + carta
self.cardSum = self.baraja.valor
if self.cardSum > 21:
self.acabar = True
def Turnar(self, baraja: Baraja) -> Baraja:
self.mazo = baraja
if self.cardSum < 16:
self.hit()
return self.mazo
def mostrarCartas(self, mostrar: bool = False):
space = ' '
space *= (len(Line) - len(self.nombre) - 7)
print(f'{Line}--')
if not mostrar:
print(f'| [{self.baraja[0].value}]\t{self.nombre}{space}|')
else:
print(f'| [{self.baraja.valor}]\t{self.nombre}{space}|')
print(f'{Line}--')
print('|| ', end='')
for i in range(len(self.baraja)):
if i > 0 and not mostrar:
print(f'??', end='')
else:
print(f'{str(self.baraja[i].valor.sign)}{self.baraja[i].simbolo}', end='')
if i < len(self.baraja) - 1:
print(' | ', end='')
print(f' ||')
print(f'{Line}--')
print()
import time
import random
import itertools as it
from pathlib import Path
from collections import namedtuple
import numpy as np
import torch.nn.functional as F
from vizdoom import DoomGame, ScreenResolution, \
ScreenFormat, GameVariable, Mode, Button
from utils.helpers import get_logger
logger = get_logger(__file__)
class DoomEnvironment():
def __init__(self, cfgs):
scenario_name = cfgs['name']
filepath = Path(__file__).parent
config_file = filepath.joinpath('assets/{}.cfg'.format(scenario_name))
scenario_file = filepath.joinpath('assets/{}.wad'.format(scenario_name))
logger.info('Loading game config from {}'.format(config_file.name))
logger.info('Loading scenario config from {}'.format(scenario_file.name))
assert config_file.is_file(), \
"{} no such file".format(config_file)
assert scenario_file.is_file(), \
"{} no such file".format(scenario_file)
self.game = DoomGame()
self.game.load_config(config_file.as_posix())
self.game.set_doom_scenario_path(scenario_file.as_posix())
self.game.set_screen_resolution(ScreenResolution.RES_320X240)
self.game.set_screen_format(ScreenFormat.GRAY8)
# Enables depth buffer.
self.game.set_depth_buffer_enabled(True)
        # Enables labeling of in-game objects.
self.game.set_labels_buffer_enabled(True)
# Enables buffer with top down map of the current episode/level.
self.game.set_automap_buffer_enabled(True)
# Sets other rendering options (all of these options except
# crosshair are enabled (set to True) by default)
self.game.set_render_hud(True)
self.game.set_render_minimal_hud(False) # If hud is enabled
self.game.set_render_crosshair(False)
self.game.set_render_weapon(True)
self.game.set_render_decals(False) # Bullet holes and blood on the walls
self.game.set_render_particles(False)
self.game.set_render_effects_sprites(False) # Smoke and blood
self.game.set_render_messages(False) # In-game messages
self.game.set_render_corpses(False)
# Effect upon taking damage or picking up items
self.game.set_render_screen_flashes(True)
# Makes episodes start after 10 tics (~after raising the weapon)
self.game.set_episode_start_time(10)
# Makes the window appear (turned on by default)
self.game.set_window_visible(True)
# Turns on the sound. (turned off by default)
self.game.set_sound_enabled(True)
# Sets ViZDoom mode (PLAYER, ASYNC_PLAYER, SPECTATOR, ASYNC_SPECTATOR,
# PLAYER mode is default)
self.game.set_mode(Mode.PLAYER)
self.game.init()
self.action_size = self.game.get_available_buttons_size()
self.actions = [a.tolist() for a in np.eye(self.action_size, dtype=bool)]
logger.debug('Action space size {}'.format(self.action_size))
logger.info('Environment setup')
def step(self, action):
reward = self.game.make_action(self.actions[action])
done = self.game.is_episode_finished()
next_state = self.get_frame()
return next_state, reward, done, {}
def reset(self):
self.game.new_episode()
return self.get_frame()
def get_total_reward(self):
return self.game.get_total_reward()
def close(self):
self.game.close()
def get_frame(self):
state = self.game.get_state()
return state.screen_buffer if state is not None else None
def update_env(self, update_fn, **kwargs):
    # No-op stub kept so the class matches a common environment interface.
    pass
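# --- Illustrative usage sketch (not part of the original file) ---
# Assumes an assets/<name>.cfg and assets/<name>.wad pair exists for the
# scenario name in the config dict (e.g. the stock 'basic' scenario).
if __name__ == '__main__':
    env = DoomEnvironment({'name': 'basic'})
    state = env.reset()
    done = False
    while not done:
        # Sample a random action index and advance the game one step.
        action = int(np.random.randint(env.action_size))
        state, reward, done, _ = env.step(action)
    logger.info('Episode finished with total reward {}'.format(env.get_total_reward()))
    env.close()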
|
python
|
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import pandas as pd
import plotly.graph_objs as go
dataset_train = pd.read_csv('sg_train.csv')
dataset_train_copy = dataset_train.copy()
dataset_train_copy = dataset_train_copy.sort_values(by='Date')
dataset_test = pd.read_csv('sg_test_predict.csv')
dataset_test = dataset_test.drop(columns=['Unnamed: 0'])
fig = go.Figure()
fig.add_trace(go.Candlestick(
    x=pd.to_datetime(dataset_train_copy['Date']),
    open=dataset_train_copy['Open'], high=dataset_train_copy['High'],
    low=dataset_train_copy['Low'], close=dataset_train_copy['Close'],
    increasing=dict(line=dict(color='blue')),
    decreasing=dict(line=dict(color='red')),
    name='Real'))
fig.add_trace(go.Candlestick(
    x=pd.to_datetime(dataset_test['Date']),
    open=dataset_test['Open'], high=dataset_test['High'],
    low=dataset_test['Low'], close=dataset_test['Close'],
    increasing=dict(line=dict(color='green')),
    decreasing=dict(line=dict(color='pink')),
    name='Predicted'))
fig.update_layout(
    title='Stock Price Trend', title_x=0.5,
    xaxis=dict(tickformat='%Y-%m-%d', title='Date', nticks=10, tickangle=-45),
    yaxis_title='Stock Price')
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.CERULEAN])
server = app.server
app.layout = html.Div(
[
dbc.Row([
dbc.Col((html.H1('Stock Predictor',
style={'textAlign': 'center', 'color': 'white', 'marginTop': 90})), width=12)
], style={'background-color': '#87D3F8', 'marginBottom': 20, 'height': 200}),
html.Div([
dbc.Row([
dbc.Col(html.H2(html.B('Predictions for Open,High,Low and Close Prices'),
style={'textAlign': 'left', 'marginBottom': 30, 'marginLeft': 10}), width=12)])
]),
html.Div([
dbc.Row([
dbc.Col(html.H5('Select the Range of dates using the Range slider below the graph',style={'textAlign':'left','marginBottom':20,'marginLeft':10}),width=12)])
]),
dbc.Row([
dbc.Col(dcc.Graph(id='candle-stick-chart', figure=fig, config={'displayModeBar': False})),
]),
])
if __name__ == '__main__':
app.run_server(debug=False)
|
python
|
import unittest
import xmlconfigparse
import xml.etree.ElementTree as ET
import xml.etree.ElementPath as EP
class XmlToDictTest(unittest.TestCase):
"""
"""
@classmethod
def setUpClass(cls):
"""Creates new xml file to test"""
# Creates xml file to be modified by test
root = ET.Element("root")
foo = ET.SubElement(root, "foo")
ET.SubElement(foo, "bar")
ET.SubElement(foo, "bar")
tree = ET.ElementTree(root)
tree.write("test.xml")
# Creates xml file to be tested against
test_root = ET.Element("root")
test_foo = ET.SubElement(test_root, "foo")
ET.SubElement(test_foo, "bar")
test_bar = ET.SubElement(test_foo, "bar")
test_name = ET.SubElement(test_bar, "name")
test_no = ET.SubElement(test_name, "no")
ET.SubElement(test_no, "more")
test_value = ET.SubElement(test_name, "value")
test_name.text = "test"
test_name.set("veg", "3")
test_value.text = "total"
test_tree = ET.ElementTree(test_root)
test_tree.write("testcase.xml")
def test_xmlinsert(self):
""" Test module
"""
xmlconfigparse.xmlinsert(
"name[@veg=3 text()=test][no/more]/value[text()=total]",
"test.xml",
tag="foo/bar[last()]",
)
try:
xmlroot = ET.parse("test.xml").getroot()
xmltestroot = ET.parse("testcase.xml").getroot()
except ET.ParseError:
self.fail(msg="Xml Parse Error")
xmlteststring = ET.tostring(xmlroot)
xmltempstring = ET.tostring(xmltestroot)
self.assertEqual(xmlteststring, xmltempstring, msg="Unexpected string returned")
def test_elementinset(self):
"""Test method insert subelements
"""
element_test = ET.Element("test")
element_temp = ET.Element("test")
new_temp = ET.SubElement(element_temp, "new")
ET.SubElement(new_temp, "insert")
token_iter = EP.xpath_tokenizer("new/insert")
xmlconfigparse.elementinsert(token_iter, element_test)
element_temp_string = ET.tostring(element_temp)
element_test_string = ET.tostring(element_test)
self.assertEqual(
element_test_string, element_temp_string, msg="Unexpected string returned"
)
def test_predicate(self):
"""Test predicate addition
"""
element_test = ET.Element("test")
element_temp = ET.Element("test")
element_temp.text = "Hey"
element_temp.set("val", "8")
ET.SubElement(element_temp, "ins")
token_iter = EP.xpath_tokenizer("@val=8]")
xmlconfigparse.add_predicate(token_iter, element_test)
token_iter = EP.xpath_tokenizer("text()=Hey]")
xmlconfigparse.add_predicate(token_iter, element_test)
token_iter = EP.xpath_tokenizer("ins/]")
xmlconfigparse.add_predicate(token_iter, element_test)
element_temp_string = ET.tostring(element_temp)
element_test_string = ET.tostring(element_test)
self.assertEqual(
element_test_string, element_temp_string, msg="Unexpected string returned"
)
def test_attribute(self):
"""Test attribute setting
"""
# template elements
attrib_element = ET.Element("test")
text_element = ET.Element("test")
attrib_element.set("val", "4")
text_element.text = "foo"
# testing elements
no_text_element = ET.Element("test")
no_attrib_element = ET.Element("test")
xmlconfigparse.set_xml_attribute(["@", "="], ["val", "4"], no_attrib_element)
xmlconfigparse.set_xml_attribute(["()", "="], ["text", "foo"], no_text_element)
element_attrib_string = ET.tostring(attrib_element)
element_text_string = ET.tostring(text_element)
no_attrib_string = ET.tostring(no_attrib_element)
no_text_string = ET.tostring(no_text_element)
self.assertEqual(
element_text_string, no_text_string, msg="Unexpected string returned"
)
self.assertEqual(
element_attrib_string, no_attrib_string, msg="Unexpected string returned"
)
if __name__ == "__main__":
unittest.main()
|
python
|
from typing import Literal
foo: Literal[""] = ""
bar: Literal[''] = ''
|
python
|
time_travel = int(input())   # travel time in hours
speed_travel = int(input())  # average speed in km/h
# Fuel burned, assuming the vehicle covers 12 km per liter.
liters = (speed_travel * time_travel) / 12
print('{:.3f}'.format(liters))
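# Worked example (input values assumed for illustration): 3 hours at 80 km/h
# covers 240 km, so the program prints 240 / 12 = 20.000 liters.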
|
python
|
import os
import re
import ssl
from operator import itemgetter
import requests
import urllib3
from bs4 import BeautifulSoup
from smart_open import open
ssl._create_default_https_context = ssl._create_unverified_context
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
MAIN_PAGE = "https://tff.org/default.aspx?pageID=545"
FIRST_VALID_SEASON_URL = "http://www.tff.org/default.aspx?pageID=561"
class Match:
def __init__(self, url_match):
self.match_url = url_match
        headers = {"User-agent": ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
                                  "AppleWebKit/537.36 (KHTML, like Gecko) "
                                  "Chrome/96.0.4664.45 Safari/537.36")}
        # verify=False disables TLS verification (warnings are suppressed above).
        response = requests.get(url_match, timeout=5, verify=False, headers=headers)
        self.match_soup = BeautifulSoup(response.content, "html.parser")
self.home_team = None
self.away_team = None
self.referee = None
self.day = None
self.time = None
self.goals = None
self.subs = None
self.red_cards = None
self.events_array = None
def parse_match_data(self):
self.parse_teams()
self.parse_referee()
self.parse_goals()
self.parse_date()
self.parse_subs()
self.parse_red_cards()
self.create_json()
def parse_teams(self):
teams_tag = self.match_soup.find_all('a', {'id': re.compile(r'Takim.$')})
team_one, team_two = teams_tag[0].text, teams_tag[1].text
self.home_team = team_one.strip()
self.away_team = team_two.strip()
def parse_referee(self):
ref_tag = self.match_soup.find('a', {'id': re.compile(r'Hakem')})
ref_name = ref_tag.text[:-7]
self.referee = ref_name
def parse_date(self):
date_tag = self.match_soup.find('span', id=lambda x: x and "Tarih" in x).text
date_splitted = date_tag.split("-")
try:
date, time = date_splitted[0], date_splitted[1]
except IndexError:
date = date_splitted[0]
time = 'Yok'
self.day = date
self.time = time
def parse_goals(self):
all_goals = []
goals = self.match_soup.findAll('a', id=lambda x: x and "Goller" in x)
for i in goals:
goal_info = i.text
team_id = re.search(r'Takim.', str(i)).group(0)[-1]
splitted_goal_info = goal_info.split(",")
scorer = splitted_goal_info[0]
scored_with = goal_info[-2]
try:
goal_minute = int(splitted_goal_info[1].split(".")[0])
except ValueError: # if goal scored in extra minutes
splitted_by_plus = splitted_goal_info[1].split(".")[0].split("+")
goal_minute = int(splitted_by_plus[0]) + int(splitted_by_plus[1])
if int(team_id) == 1:
goal_element = ['home', scorer, goal_minute, scored_with]
else:
goal_element = ['away', scorer, goal_minute, scored_with]
all_goals.append(goal_element)
self.goals = sorted(all_goals, key=itemgetter(2))
def parse_subs(self):
team_one_out, team_two_out = [], []
team_one_in, team_two_in = [], []
        # 'Cikanlar' marks players subbed OUT and 'Girenler' players subbed IN,
        # so the tag lists are named accordingly.
        subs_out_tags = self.match_soup.find_all(['a', "span"], {'id': re.compile(r'Takim._rptCikanlar')})
        team_id = 1
        for e, i in enumerate(subs_out_tags):
            if e == 0:
                continue
            if i.text == "Oyundan Çıkanlar":
                team_id = 2
                continue
            if team_id == 1:
                team_one_out.append(i.text)
            else:
                team_two_out.append(i.text)
        subs_in_tags = self.match_soup.find_all(['a', "span"], {'id': re.compile(r'Takim._rptGirenler')})
        team_id = 1
        for e, i in enumerate(subs_in_tags):
if e == 0:
continue
if i.text == "Oyuna Girenler":
team_id = 2
continue
if team_id == 1:
team_one_in.append(i.text)
else:
team_two_in.append(i.text)
team_one_all = [team_one_in, team_one_out]
team_two_all = [team_two_in, team_two_out]
all_subs = [team_one_all, team_two_all]
self.subs = all_subs
def parse_red_cards(self):
team_one_red, team_two_red = [], []
red_card_tags = self.match_soup.find_all('img', {'alt': ["Çift Sarı Kart", "Kırmızı Kart"]})
for i in red_card_tags:
team_id = re.search(r'Takim.', str(i)).group(0)[-1]
name_minute = i.text.strip()
name_minute_splitted = name_minute.split("\n")
# Discard cards seen after the match
if name_minute_splitted[-1] == "MS":
continue
if int(team_id) == 1:
team_one_red.extend(name_minute_splitted)
else:
team_two_red.extend(name_minute_splitted)
all_red_cards = [team_one_red, team_two_red]
self.red_cards = all_red_cards
def create_json(self):
constant_info = {
'match_start_date': self.day.strip(),
'match_start_time': self.time.strip(),
'home_team': self.home_team,
'away_team': self.away_team,
'referee': self.referee
}
events = []
if len(self.goals) > 0:
home_score, away_score = 0, 0
for goal in self.goals:
dict_merge = {}
if goal[0] == 'home':
home_score += 1
else:
away_score += 1
event_json = {'type': 'goal', 'scoring_team': goal[0], 'scorer': goal[1], 'event_minute': goal[2],
'scored_with': goal[3], 'current_home_score': home_score,
'current_away_score': away_score}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
sub_time = None
if self.subs is not None:
for home_sub_in, home_sub_out in zip(reversed(self.subs[0][0]), reversed(self.subs[0][1])):
if ".dk" in home_sub_in:
sub_time = home_sub_in.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'home_sub', 'event_minute': sub_time, 'sub_in': home_sub_in,
'sub_out': home_sub_out}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
for away_sub_in, away_sub_out in zip(reversed(self.subs[1][0]), reversed(self.subs[1][1])):
if ".dk" in away_sub_in:
sub_time = away_sub_in.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'away_sub', 'event_minute': sub_time, 'sub_in': away_sub_in,
'sub_out': away_sub_out}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
if self.red_cards is not None:
card_time = None
for home_red in reversed(self.red_cards[0]):
if ".dk" in home_red:
card_time = home_red.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'home_red', 'event_minute': card_time, 'info': home_red}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
for away_red in reversed(self.red_cards[1]):
if ".dk" in away_red:
card_time = away_red.split(".")[0]
continue
dict_merge = {}
event_json = {'type': 'away_red', 'event_minute': card_time, 'info': away_red}
dict_merge.update(event_json)
dict_merge.update(constant_info)
events.append(dict_merge)
self.events_array = events
    def __str__(self):
        # self.goals is one chronologically sorted list; split it into home
        # and away goals for display.
        home_goals = [g for g in self.goals if g[0] == 'home']
        away_goals = [g for g in self.goals if g[0] == 'away']
        return f"""
        Match date: {self.day}
        Match time: {self.time}
        Home team: {self.home_team}
        Away team: {self.away_team}
        Referee: {self.referee}
        Home team goals: {home_goals}
        Away team goals: {away_goals}
        Home team subs: {self.subs[0]}
        Away team subs: {self.subs[1]}
        Home team red cards: {self.red_cards[0]}
        Away team red cards: {self.red_cards[1]}
        """.strip()
def create_match_obj_list():
"""
read match_urls file from s3 and create match objects
"""
with open(f"s3://{os.environ['BUCKET_NAME']}/match_urls.txt") as f:
content = f.readlines()
all_matches = [x.strip() for x in content]
match_obj_list = []
for match_url in all_matches:
print("Parsing match url", match_url)
try:
match = Match(match_url)
match.parse_match_data()
match_obj_list.append(match)
except Exception as e:
print(str(e))
continue
return match_obj_list
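# --- Illustrative usage sketch (not part of the original file) ---
# The match URL below is hypothetical; any real TFF match-detail URL of the
# same shape should work. Note that create_match_obj_list() additionally
# requires the BUCKET_NAME environment variable and a match_urls.txt on S3.
if __name__ == '__main__':
    demo = Match("https://www.tff.org/default.aspx?pageId=29&macId=12345")
    demo.parse_match_data()
    print(demo)
    for event in demo.events_array:
        print(event)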
|
python
|
# Copyright 2020 Dylan Baker
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ConstList.
These work by using mypy ignore statements for code that should break, and
relying on mypy to catch "useless ignore" statements
"""
from __future__ import annotations
import typing
from constprotocol import ConstList
class MyList(list):
pass
def test_const_list_parameter() -> None:
"""Test passing various kinds into a function that returns a ConstList."""
def func(l: ConstList[str]) -> None:
pass
# Plain list does work
func(['a', 'b'])
# A ConstList does work
c: ConstList[str] = ['1', '2']
func(c)
# Sequence doesn't work, because it doesn't implement add or copy
a: typing.Sequence[str] = ['str']
func(a) # type: ignore
# Set doesn't work
b = {'a', 'b'}
func(b) # type: ignore
# Dict keys don't work
func({'a': 'b'}.keys()) # type: ignore
def test_const_list_return() -> None:
"""Test returning a ConstList and using that value."""
def func() -> ConstList[str]:
return ['a', 'b', 'c']
a = func()
# Does not have an append method
a.append('d') # type: ignore
    # Does not support in-place add (__iadd__)
a += ['b'] # type: ignore
# Works, since a is not mutated
b: ConstList[str] = a + ['a']
# Does not have an append method
b.append('d') # type: ignore
# Works, since a is not mutated
c = func() + ['d']
c.append('e')
assert c == ['a', 'b', 'c', 'd', 'e']
# With a custom class
d: ConstList[str] = MyList(['a', 'b'])
e = d + ['c']
|
python
|
# FinSim
#
# Copyright 2019 Carnegie Mellon University. All Rights Reserved.
#
# NO WARRANTY. THIS CARNEGIE MELLON UNIVERSITY AND SOFTWARE ENGINEERING INSTITUTE MATERIAL IS FURNISHED ON AN "AS-IS" BASIS. CARNEGIE MELLON UNIVERSITY MAKES NO WARRANTIES OF ANY KIND, EITHER EXPRESSED OR IMPLIED, AS TO ANY MATTER INCLUDING, BUT NOT LIMITED TO, WARRANTY OF FITNESS FOR PURPOSE OR MERCHANTABILITY, EXCLUSIVITY, OR RESULTS OBTAINED FROM USE OF THE MATERIAL. CARNEGIE MELLON UNIVERSITY DOES NOT MAKE ANY WARRANTY OF ANY KIND WITH RESPECT TO FREEDOM FROM PATENT, TRADEMARK, OR COPYRIGHT INFRINGEMENT.
#
# Released under a MIT (SEI)-style license, please see license.txt or contact [email protected] for full terms.
#
# [DISTRIBUTION STATEMENT A] This material has been approved for public release and unlimited distribution. Please see Copyright notice for non-US Government use and distribution.
#
# This Software includes and/or makes use of the following Third-Party Software subject to its own license:
#
# 1. Django (https://www.djangoproject.com/foundation/faq/) Copyright 2005-2018 Django Software Foundation.
# 2. bootstrap (https://getbootstrap.com/docs/4.0/about/license/) Copyright 2018 Twitter.
# 3. glyphicons (https://www.glyphicons.com/license/) Copyright 2010-2018 GLYPHICONS.
# 4. jquery (https://jquery.org/license/) Copyright 2018 jquery foundation.
# 5. jquery tablesorter (https://mottie.github.io/tablesorter/docs/) Copyright 2007-2018 Christian Bach, Rob Garrison and Contributing Authors (see AUTHORS file).
# 6. jquery validate (https://jqueryvalidation.org/) Copyright 2006-2018 Jörn Zaefferer, Markus Staab, Brahim Arkni, and contributors.
# 7. jquery validate unobtrusive (https://github.com/aspnet/jquery-validation-unobtrusive/blob/master/LICENSE.txt) Copyright 2014-2018 .NET Foundation.
# 8. modernizr (https://github.com/Modernizr/Modernizr/blob/master/LICENSE) Copyright 2009-2018 https://github.com/Modernizr/Modernizr/graphs/contributors.
# 9. respond.js (https://github.com/scottjehl/Respond) Copyright 2011 Scott Jehl.
# 10. roboto fonts (https://fonts.google.com/specimen/Roboto) Copyright 2015-2018 Google, Inc..
# 11. xregexp (http://xregexp.com/) Copyright 2007-2012 Steven Levithan.
#
# DM19-0396
#
"""
Definition of forms.
"""
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy as _
class BootstrapAuthenticationForm(AuthenticationForm): #Authenticate - username and password
"""Authentication form which uses boostrap CSS."""
username = forms.CharField(max_length=254,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'User name'}))
password = forms.CharField(label=_("Password"),
widget=forms.PasswordInput({
'class': 'form-control',
'placeholder': 'Password'}))
# class BootstrapTemplateOfficialDatatableView(DatatableView): #JA comments
# model = CommercialAccounts
# datatable_options = {
# 'structure_template': "datatableview/bootstrap_structure.html",
# 'columns': [
# '',
# 'headline',
# 'blog',
# 'pub_date',
# ],
# }
|
python
|
#Follow up for "Unique Paths":
#
#Now consider if some obstacles are added to the grids. How many unique paths would there be?
#
#An obstacle and empty space is marked as 1 and 0 respectively in the grid.
#
#For example,
#There is one obstacle in the middle of a 3x3 grid as illustrated below.
#
#[
# [0,0,0],
# [0,1,0],
# [0,0,0]
#]
#The total number of unique paths is 2.
#
#Note: m and n will be at most 100.
class Solution(object):
    def uniquePathsWithObstacles(self, obstacleGrid):
        """
        :type obstacleGrid: List[List[int]]
        :rtype: int
        """
        m, n = len(obstacleGrid), len(obstacleGrid[0])
        # dp[i][j] counts the unique paths that reach cell (i, j).
        dp = [[0 for _ in range(n)] for _ in range(m)]
        for i in range(m):
            for j in range(n):
                if obstacleGrid[i][j] == 0:  # free cell; obstacle cells stay 0
                    if i == 0 and j == 0:
                        dp[i][j] = 1
                    elif i == 0:
                        dp[i][j] = dp[i][j-1]
                    elif j == 0:
                        dp[i][j] = dp[i-1][j]
                    else:
                        dp[i][j] = dp[i-1][j] + dp[i][j-1]
        return dp[m-1][n-1]
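# Quick check using the example grid from the problem statement above:
# the single obstacle in the middle leaves exactly two distinct paths.
if __name__ == '__main__':
    grid = [[0, 0, 0],
            [0, 1, 0],
            [0, 0, 0]]
    print(Solution().uniquePathsWithObstacles(grid))  # prints 2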
|
python
|
# Given a binary tree, compute its diameter: the length of the longest path
# between any two nodes. This path may or may not pass through the root.
#
# Example:
# Given the binary tree
#
#           1
#          / \
#         2   3
#        / \
#       4   5
# return 3, the length of the path [4,2,1,3] or [5,2,1,3].
#
# Note: the length of a path between two nodes is the number of edges between them.
#
# Source: LeetCode (China)
# Link: https://leetcode-cn.com/problems/diameter-of-binary-tree
# Copyright belongs to LeetCode. Commercial reprints require official
# authorization; non-commercial reprints must credit the source.
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.ans = 0
def depth(self, root: TreeNode) -> int:
if not root:
return 0
l = self.depth(root.left)
r = self.depth(root.right)
self.ans = max(self.ans, l + r + 1)
return max(l, r) + 1
def diameterOfBinaryTree(self, root: TreeNode) -> int:
if not root:
return 0
self.depth(root)
return self.ans - 1
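# Small demonstration building the example tree from the comment above
# (not part of the original solution):
if __name__ == '__main__':
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    root.left.left = TreeNode(4)
    root.left.right = TreeNode(5)
    print(Solution().diameterOfBinaryTree(root))  # prints 3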
|
python
|
#!/usr/bin/env python
"""
Creates MRI axial pictures for custom T-shirt.
Usage:
mripicture.py [options] <study>
mripicture.py [options] <study> [-s <subject>]...
mripicture.py [options] <study> [-s <subject>]... [-t <tag>]
Arguments:
<study> Nickname of the study to process
Options:
-s --subject Subjects
-o --output=FOLDER Output directory (default: /archive/data/{study}/data/tshirt)
-t --tag=TAG Scan tag [default: T1]
-f --force Force overwrite of output files [default: False]
-h --help Show this screen
-q --quiet Show minimal output
-d --debug Show debug messages
-v --verbose Show intermediate steps
"""
import os
import glob
import logging
from nilearn import plotting
import numpy as np
from docopt import docopt
import datman.config
import datman.scan
logging.basicConfig(level=logging.WARN,
format="[%(name)s] %(levelname)s: %(message)s")
logger = logging.getLogger(os.path.basename(__file__))
def get_all_subjects(config):
nii_dir = config.get_path("nii")
subject_nii_dirs = glob.glob(os.path.join(nii_dir, "*"))
all_subs = [os.path.basename(path) for path in subject_nii_dirs]
return all_subs
def main():
arguments = docopt(__doc__)
study = arguments["<study>"]
outdir = arguments["--output"]
subs = arguments["<subject>"]
tag = arguments["--tag"]
force = arguments["--force"]
quiet = arguments["--quiet"]
debug = arguments["--debug"]
verbose = arguments["--verbose"]
config = datman.config.config(study=study)
# setup logging
if quiet:
logger.setLevel(logging.ERROR)
if verbose:
logger.setLevel(logging.INFO)
if debug:
logger.setLevel(logging.DEBUG)
if subs:
logger.info(
f"Creating pictures for subjects [ {', '.join(subs)} ] from "
f"{study} project using {tag} scans."
)
else:
subs = get_all_subjects(config)
logger.info(
f"Creating pictures for all {len(subs)} subjects from {study} "
f"project using {tag} scans."
)
if not outdir:
outdir = os.path.join(config.get_path("data"), "tshirt")
os.makedirs(outdir, exist_ok=True)
logger.debug(f"Output location set to: {outdir}")
if force:
logger.info("Overwriting existing files")
for subject in subs:
scan = datman.scan.Scan(subject, config)
tagged_scan = scan.get_tagged_nii(tag)
idx = np.argmax([ss.series_num for ss in tagged_scan])
# Set Path
imgpath = tagged_scan[idx].path
outpath = os.path.join(outdir, subject + "_T1.pdf")
if os.path.isfile(outpath) and not force:
logger.debug(f"Skipping subject {subject} as files already exist.")
else:
# Output Image
t1_pic = plotting.plot_anat(
imgpath,
cut_coords=(-20, -10, 2),
display_mode="x",
annotate=False,
draw_cross=False,
vmin=100,
vmax=1100,
threshold="auto",
)
t1_pic.savefig(outpath, dpi=1000)
logger.debug(
f"Created new brain pictures for subject {subject} from file "
f"{imgpath} and saved as {outpath}"
)
logger.info(f"Saved all output to: {outdir}")
if __name__ == "__main__":
main()
|
python