| code (string, 2k-1.04M) | repo_path (string, 5-517) | parsed_code (string, 0-1.04M) | quality_prob (float64, 0.02-0.95) | learning_prob (float64, 0.02-0.93) |
---|---|---|---|---|
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class Histogram(object):
"""
    Captures the histogram data related to profiling.
"""
def __init__(self, **kwargs):
"""
Initializes a new Histogram object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param ranges:
The value to assign to the ranges property of this Histogram.
:type ranges: list[str]
:param counts:
The value to assign to the counts property of this Histogram.
:type counts: list[int]
"""
self.swagger_types = {
'ranges': 'list[str]',
'counts': 'list[int]'
}
self.attribute_map = {
'ranges': 'ranges',
'counts': 'counts'
}
self._ranges = None
self._counts = None
@property
def ranges(self):
"""
Gets the ranges of this Histogram.
Range of values
:return: The ranges of this Histogram.
:rtype: list[str]
"""
return self._ranges
@ranges.setter
def ranges(self, ranges):
"""
Sets the ranges of this Histogram.
Range of values
:param ranges: The ranges of this Histogram.
:type: list[str]
"""
self._ranges = ranges
@property
def counts(self):
"""
Gets the counts of this Histogram.
        Count of each range.
:return: The counts of this Histogram.
:rtype: list[int]
"""
return self._counts
@counts.setter
def counts(self, counts):
"""
Sets the counts of this Histogram.
        Count of each range.
:param counts: The counts of this Histogram.
:type: list[int]
"""
self._counts = counts
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
|
src/oci/data_connectivity/models/histogram.py
|
| 0.851027 | 0.430806 |
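A minimal usage sketch for the Histogram model in the row above, assuming the OCI Python SDK is installed and exposes it as oci.data_connectivity.models.Histogram; the bucket labels and counts are made-up illustration values:

# Hypothetical bucket labels and counts; any matching pair of lists works.
from oci.data_connectivity.models import Histogram

hist = Histogram(ranges=["0-10", "10-20", "20-30"], counts=[4, 9, 2])
print(hist.ranges)   # ['0-10', '10-20', '20-30']
print(hist.counts)   # [4, 9, 2]
print(hist)          # __repr__ renders the model via formatted_flat_dict
assert hist == Histogram(ranges=["0-10", "10-20", "20-30"], counts=[4, 9, 2])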
from ...flash.flash import Flash
from ...core.coresight_target import CoreSightTarget
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
import logging
FLASH_ALGO = { 'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x4603B510, 0x4893460C, 0x68414448, 0xF0006888, 0xB1087080, 0xBD102001, 0x4448488E, 0x60486880,
0xE7F82000, 0x488B4602, 0x68414448, 0xF0206888, 0x60884070, 0x47702000, 0x44484886, 0x68886841,
0x7080F000, 0x2001B108, 0x6A484770, 0x2000B148, 0x6A486248, 0x2002B128, 0x6A486248, 0x2001B108,
0x6888E7F2, 0x4070F020, 0x5000F040, 0x20006088, 0xB510E7EA, 0x44484877, 0xF7FF6844, 0xB108FFDD,
0xBD102001, 0xF42068A0, 0xF440407F, 0x60A0402A, 0xF04068A0, 0x60A00002, 0x68A0BF00, 0x7080F000,
0xD1FA2800, 0xF02068A0, 0x60A04070, 0xF0006A60, 0xB1080002, 0xE7E42001, 0xE7E22000, 0x4605B570,
0x44484864, 0xF7FF6844, 0xB108FFB7, 0xBD702001, 0xF42068A0, 0xF440407F, 0x60A040AA, 0x68A06025,
0x0004F040, 0xBF0060A0, 0xF00068A0, 0x28007080, 0x68A0D1FA, 0x4070F020, 0x6A6060A0, 0x0002F000,
0x2001B108, 0x2000E7E3, 0xE92DE7E1, 0x460747F0, 0x4690468A, 0x4448484F, 0x46566844, 0xF0084645,
0xB1100003, 0xE8BD2001, 0x464587F0, 0xFF84F7FF, 0x2001B108, 0x68A0E7F7, 0x6000F020, 0x68A060A0,
0x0010F040, 0xE00E60A0, 0xCD016027, 0x68A06320, 0x0001F040, 0xBF0060A0, 0xF00068A0, 0x28007080,
0x1D3FD1FA, 0x2E041F36, 0xF007D303, 0x2800001F, 0x4838D1EA, 0x68C04448, 0xD1212880, 0xD31F2E10,
0xF02068A0, 0x60A00010, 0xF04068A0, 0x60A06000, 0x6027E014, 0x6320CD01, 0x6360CD01, 0x63A0CD01,
0x63E0CD01, 0xF04068A0, 0x60A00001, 0x68A0BF00, 0x7080F000, 0xD1FA2800, 0x3E103710, 0xD2E82E10,
0xD3192E04, 0xF02068A0, 0x60A06000, 0xF04068A0, 0x60A00010, 0x6027E00E, 0x6320CD01, 0xF04068A0,
0x60A00001, 0x68A0BF00, 0x7080F000, 0xD1FA2800, 0x1F361D3F, 0xD2EE2E04, 0x68A2B306, 0x6200F022,
0x68A260A2, 0x0210F042, 0xF04F60A2, 0x21FF30FF, 0x682AE005, 0x0201EA62, 0x02094010, 0x2E001E76,
0x6027D1F7, 0x68A26320, 0x0201F042, 0xBF0060A2, 0xF00268A2, 0x2A007280, 0xBF00D1FA, 0xF02068A0,
0x60A04070, 0xF0006A60, 0xB1080002, 0xE76A2001, 0xE7682000, 0x00000004, 0x00000000, 0x00000000,
# FLC_BASE, CLK_DIV, BRST_SIZE, FLASH_BASE, FLASH_SIZE, FLASH_SECTOR
0x40002000, 0x00000060, 0x00000020, 0x00000000, 0x00200000, 0x00002000
],
'pc_init' : 0x20000021,
'pc_eraseAll' : 0x20000093,
'pc_erase_sector' : 0x200000DD,
'pc_program_page' : 0x2000012B,
'begin_data' : 0x20004000, # Analyzer uses a max of 128B data (32 pages * 4 bytes / page)
'page_buffers' : [0x20006000, 0x20008000], # Enable double buffering
'begin_stack' : 0x20002000,
'static_base' : 0x20000278,
'min_program_length' : 4,
'analyzer_supported' : True,
'analyzer_address' : 0x2000A000 # Analyzer 0x2000A000..0x2000A600
}
class MAX32630(CoreSightTarget):
VENDOR = "Maxim"
memoryMap = MemoryMap(
FlashRegion( start=0, length=0x200000, blocksize=0x2000, is_boot_memory=True, algo=FLASH_ALGO),
RamRegion( start=0x20000000, length=0x40000),
)
def __init__(self, link):
super(MAX32630, self).__init__(link, self.memoryMap)
self._svd_location = SVDFile.from_builtin("max32630.svd")
|
pyocd/target/builtin/target_MAX32630.py
|
| 0.39222 | 0.311126 |
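The comment inside FLASH_ALGO marks the last six words of the instruction blob as the algorithm's configuration constants. A small cross-check sketch, using only values shown in this row, confirming they agree with the FlashRegion geometry in MAX32630.memoryMap:

# Constants copied verbatim from the tail of FLASH_ALGO['instructions'] above.
FLC_BASE, CLK_DIV, BRST_SIZE, FLASH_BASE, FLASH_SIZE, FLASH_SECTOR = (
    0x40002000, 0x00000060, 0x00000020, 0x00000000, 0x00200000, 0x00002000)

# The FlashRegion above uses start=0, length=0x200000, blocksize=0x2000.
assert (FLASH_BASE, FLASH_SIZE, FLASH_SECTOR) == (0x0, 0x200000, 0x2000)
print("flash sectors:", FLASH_SIZE // FLASH_SECTOR)  # 256 sectors of 8 KiB each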
"""Classes and functions related to Quality Control of incoming data."""
# Python imports
from __future__ import absolute_import
import logging
# weewx imports
import weeutil.weeutil
import weewx.units
from weeutil.weeutil import to_float
log = logging.getLogger(__name__)
# ==============================================================================
# Class QC
# ==============================================================================
class QC(object):
"""Class to apply quality checks to a record."""
def __init__(self, mm_dict):
"""
Initialize
Args:
mm_dict: A dictionary containing the limits. The key is an observation type, the value
is a 2- or 3-way tuple. If a 2-way tuple, then the values are (min, max) acceptable
value in a record for that observation type. If a 3-way tuple, then the values are
(min, max, unit), where min and max are as before, but the value 'unit' is the unit the
min and max values are in. If 'unit' is not specified, then the values must be in the
same unit as the incoming record (a risky supposition!).
"""
self.mm_dict = {}
for obs_type in mm_dict:
self.mm_dict[obs_type] = list(mm_dict[obs_type])
# The incoming min, max values may be from a ConfigObj, which are typically strings.
# Convert to floats.
self.mm_dict[obs_type][0] = to_float(self.mm_dict[obs_type][0])
self.mm_dict[obs_type][1] = to_float(self.mm_dict[obs_type][1])
def apply_qc(self, data_dict, data_type=''):
"""Apply quality checks to the data in a record"""
converter = weewx.units.StdUnitConverters[data_dict['usUnits']]
for obs_type in self.mm_dict:
if obs_type in data_dict and data_dict[obs_type] is not None:
# Extract the minimum and maximum acceptable values
min_v, max_v = self.mm_dict[obs_type][0:2]
# If a unit has been specified, convert the min, max acceptable value to the same
# unit system as the incoming record:
if len(self.mm_dict[obs_type]) == 3:
min_max_unit = self.mm_dict[obs_type][2]
group = weewx.units.getUnitGroup(obs_type)
min_v = converter.convert((min_v, min_max_unit, group))[0]
max_v = converter.convert((max_v, min_max_unit, group))[0]
if not min_v <= data_dict[obs_type] <= max_v:
log.warning("%s %s value '%s' %s outside limits (%s, %s)",
weeutil.weeutil.timestamp_to_string(data_dict['dateTime']),
data_type, obs_type, data_dict[obs_type], min_v, max_v)
data_dict[obs_type] = None
|
dist/weewx-4.6.0b7/bin/weewx/qc.py
|
"""Classes and functions related to Quality Control of incoming data."""
# Python imports
from __future__ import absolute_import
import logging
# weewx imports
import weeutil.weeutil
import weewx.units
from weeutil.weeutil import to_float
log = logging.getLogger(__name__)
# ==============================================================================
# Class QC
# ==============================================================================
class QC(object):
"""Class to apply quality checks to a record."""
def __init__(self, mm_dict):
"""
Initialize
Args:
mm_dict: A dictionary containing the limits. The key is an observation type, the value
is a 2- or 3-way tuple. If a 2-way tuple, then the values are (min, max) acceptable
value in a record for that observation type. If a 3-way tuple, then the values are
(min, max, unit), where min and max are as before, but the value 'unit' is the unit the
min and max values are in. If 'unit' is not specified, then the values must be in the
same unit as the incoming record (a risky supposition!).
"""
self.mm_dict = {}
for obs_type in mm_dict:
self.mm_dict[obs_type] = list(mm_dict[obs_type])
# The incoming min, max values may be from a ConfigObj, which are typically strings.
# Convert to floats.
self.mm_dict[obs_type][0] = to_float(self.mm_dict[obs_type][0])
self.mm_dict[obs_type][1] = to_float(self.mm_dict[obs_type][1])
def apply_qc(self, data_dict, data_type=''):
"""Apply quality checks to the data in a record"""
converter = weewx.units.StdUnitConverters[data_dict['usUnits']]
for obs_type in self.mm_dict:
if obs_type in data_dict and data_dict[obs_type] is not None:
# Extract the minimum and maximum acceptable values
min_v, max_v = self.mm_dict[obs_type][0:2]
# If a unit has been specified, convert the min, max acceptable value to the same
# unit system as the incoming record:
if len(self.mm_dict[obs_type]) == 3:
min_max_unit = self.mm_dict[obs_type][2]
group = weewx.units.getUnitGroup(obs_type)
min_v = converter.convert((min_v, min_max_unit, group))[0]
max_v = converter.convert((max_v, min_max_unit, group))[0]
if not min_v <= data_dict[obs_type] <= max_v:
log.warning("%s %s value '%s' %s outside limits (%s, %s)",
weeutil.weeutil.timestamp_to_string(data_dict['dateTime']),
data_type, obs_type, data_dict[obs_type], min_v, max_v)
data_dict[obs_type] = None
| 0.906091 | 0.650939 |
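A minimal sketch of how the QC class above might be used, assuming weewx is installed (so weewx.qc, weewx.units and the weewx.US unit-system constant are available); the observation limits and the sample record are purely illustrative:

import weewx
from weewx.qc import QC

# Limits may arrive from ConfigObj as strings; a 3-tuple pins the unit explicitly.
qc = QC({
    'outTemp': ('-40', '120', 'degree_F'),  # hypothetical limits in Fahrenheit
    'outHumidity': ('0', '100'),            # assumed to match the record's unit
})

record = {'dateTime': 1600000000, 'usUnits': weewx.US,
          'outTemp': 250.0, 'outHumidity': 55.0}
qc.apply_qc(record, data_type='LOOP')
print(record['outTemp'])      # None: 250 is outside (-40, 120) degree_F
print(record['outHumidity'])  # 55.0: within limits, left untouched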
import re
from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, \
Whitespace, Name, Literal, Comment, Text
__all__ = ['SparqlLexer']
class SparqlLexer(RegexLexer):
"""
Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
.. versionadded:: 2.0
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.rq', '*.sparql']
mimetypes = ['application/sparql-query']
flags = re.IGNORECASE
tokens = {
'root': [
(r'\s+', Whitespace),
(r'(select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from named|from|order\s+by|limit|'
r'offset|bindings|load|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
r'using named|using|graph|default|named|all|optional|service|'
r'silent|bind|union|not in|in|as|a)', Keyword),
(r'(prefix|base)(\s+)([a-z][\w-]*)(\s*)(\:)',
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
Punctuation)),
(r'\?[a-z_]\w*', Name.Variable),
(r'<[^>]+>', Name.Label),
(r'([a-z][\w-]*)(\:)([a-z][\w-]*)',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
(r'(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
r'isliteral|isnumeric|regex|substr|replace|exists|not exists|'
r'count|sum|min|max|avg|sample|group_concat|separator)\b',
Name.Function),
(r'(true|false)', Literal),
(r'[+\-]?\d*\.\d+', Number.Float),
            (r'[+\-]?\d*(?:\.\d+)?E[+\-]?\d+', Number.Float),
(r'[+\-]?\d+', Number.Integer),
(r'(\|\||&&|=|\*|\-|\+|/)', Operator),
(r'[(){}.;,:^]', Punctuation),
(r'#[^\n]+', Comment),
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'.', String, '#pop'),
],
'end-of-string': [
            (r'(@)([a-z]+(?:-[a-z0-9]+)*)',
bygroups(Operator, Name.Function), '#pop:2'),
(r'\^\^', Operator, '#pop:2'),
default('#pop:2'),
],
}
|
OmniMarkupLib/Renderers/libs/pygments/lexers/rdf.py
|
| 0.460532 | 0.336481 |
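A short sketch showing the SparqlLexer above in a standard Pygments pipeline; the query string is an arbitrary example:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.rdf import SparqlLexer

query = '''PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?name WHERE { ?person foaf:name ?name } LIMIT 10'''

# Raw token stream, as produced by RegexLexer.get_tokens()
for token_type, value in SparqlLexer().get_tokens(query):
    print(token_type, repr(value))

# Or render directly with any Pygments formatter.
print(highlight(query, SparqlLexer(), TerminalFormatter()))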
from functools import partial
from typing import Iterator, List, Optional, Mapping
from preacher.compilation.argument import Arguments, inject_arguments
from preacher.compilation.error import on_key
from preacher.compilation.parameter import Parameter, compile_parameter
from preacher.compilation.util.functional import (
map_compile,
compile_flattening,
)
from preacher.compilation.util.type import (
ensure_bool,
ensure_optional_str,
ensure_list,
ensure_mapping,
)
from preacher.compilation.verification import DescriptionCompiler
from preacher.core.scenario import Scenario, Case
from .case import CaseCompiler
_KEY_LABEL = "label"
_KEY_WHEN = "when"
_KEY_DEFAULT = "default"
_KEY_ORDERED = "ordered"
_KEY_CASES = "cases"
_KEY_PARAMETERS = "parameters"
_KEY_SUBSCENARIOS = "subscenarios"
class ScenarioCompiler:
def __init__(self, description: DescriptionCompiler, case: CaseCompiler):
self._description = description
self._case = case
def compile(self, obj: object, arguments: Optional[Arguments] = None) -> Scenario:
"""
Compile the given object into a scenario.
Args:
obj: A compiled object, which should be a mapping.
arguments: Arguments to inject.
Returns:
The scenario as the result of compilation.
Raises:
CompilationError: when the compilation fails.
"""
obj = ensure_mapping(obj)
arguments = arguments or {}
label_obj = inject_arguments(obj.get(_KEY_LABEL), arguments)
with on_key(_KEY_LABEL):
label = ensure_optional_str(label_obj)
parameters_obj = obj.get(_KEY_PARAMETERS)
if parameters_obj is not None:
with on_key(_KEY_PARAMETERS):
parameters_obj = ensure_list(parameters_obj)
parameters = list(map_compile(compile_parameter, parameters_obj))
subscenarios = [
self._compile_parameterized(obj, arguments, parameter) for parameter in parameters
]
return Scenario(label=label, subscenarios=subscenarios)
ordered_obj = inject_arguments(obj.get(_KEY_ORDERED, True), arguments)
with on_key(_KEY_ORDERED):
ordered = ensure_bool(ordered_obj)
default_obj = inject_arguments(obj.get(_KEY_DEFAULT, {}), arguments)
with on_key(_KEY_DEFAULT):
case_compiler = self._case.compile_default(default_obj)
condition_obj = inject_arguments(obj.get(_KEY_WHEN, []), arguments)
with on_key(_KEY_WHEN):
conditions = self._compile_conditions(condition_obj)
case_obj = inject_arguments(obj.get(_KEY_CASES, []), arguments)
with on_key(_KEY_CASES):
cases = self._compile_cases(case_compiler, case_obj)
subscenario_obj = obj.get(_KEY_SUBSCENARIOS, [])
with on_key(_KEY_SUBSCENARIOS):
subscenarios = self._compile_subscenarios(
case_compiler,
subscenario_obj,
arguments,
)
return Scenario(
label=label,
ordered=ordered,
conditions=conditions,
cases=cases,
subscenarios=subscenarios,
)
def compile_flattening(
self,
obj: object,
arguments: Optional[Arguments] = None,
) -> Iterator[Scenario]:
"""
Compile the given object into a scenario with flattening:
a nested object list results in a flattened scenario.
Args:
obj: A compiled object or a list.
arguments: Arguments to inject.
Returns:
A scenario iterator as the result of compilation.
Raises:
CompilationError: when the compilation fails for each iteration.
"""
compile = partial(self.compile, arguments=arguments)
return compile_flattening(compile, obj)
def _compile_conditions(self, obj: object):
return list(map_compile(self._description.compile, ensure_list(obj)))
@staticmethod
def _compile_cases(case_compiler: CaseCompiler, obj: object) -> List[Case]:
return list(map_compile(case_compiler.compile_fixed, ensure_list(obj)))
def _compile_subscenarios(
self,
case: CaseCompiler,
obj: object,
arguments: Arguments,
) -> List[Scenario]:
compiler = ScenarioCompiler(description=self._description, case=case)
return list(
map_compile(
lambda sub_obj: compiler.compile(sub_obj, arguments=arguments),
ensure_list(obj),
)
)
def _compile_parameterized(
self,
obj: Mapping,
arguments: Arguments,
parameter: Parameter,
) -> Scenario:
template = {k: v for (k, v) in obj.items() if k not in (_KEY_LABEL, _KEY_PARAMETERS)}
template["label"] = parameter.label
arguments = dict(arguments)
arguments.update(parameter.arguments)
return self.compile(template, arguments)
|
preacher/compilation/scenario/scenario.py
|
| 0.914565 | 0.238018 |
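For reference, a sketch of the kind of mapping ScenarioCompiler.compile expects, built from the _KEY_* constants above; the compiler and its description/case sub-compilers are assumed to be constructed elsewhere, and the case bodies are placeholders:

# Hypothetical scenario mapping; keys mirror _KEY_LABEL, _KEY_ORDERED, _KEY_WHEN,
# _KEY_DEFAULT, _KEY_CASES and _KEY_SUBSCENARIOS defined above.
scenario_obj = {
    "label": "User API",
    "ordered": True,
    "when": [],                          # compiled by DescriptionCompiler
    "default": {},                       # folded into the CaseCompiler defaults
    "cases": [{"label": "list users"}],  # placeholder case contents
    "subscenarios": [
        {"label": "nested", "cases": [{"label": "user detail"}]},
    ],
}

# compiler = ScenarioCompiler(description=description_compiler, case=case_compiler)
# scenario = compiler.compile(scenario_obj, arguments={"base_url": "http://localhost"})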
import logging
from horizon import exceptions
from horizon import forms
from horizon import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.cdn.cdn_domain_manager.models import Domain, CdnBillMethod
import uuid
import datetime
import calendar
from openstack_dashboard.dashboards.cdn.middware import DomainManage
from openstack_dashboard import api
from openstack_dashboard.utils.memcache_manager import set_memcache_value
level_choice = [
('ip', _("Origin IP")),
('url', _("Origin Domain Name"))]
LOG = logging.getLogger(__name__)
access_choice = [
('white', _("White List")),
('black', _('Black List'))
]
class CreateForm(forms.SelfHandlingForm):
    '''Self-handling modal form for creating a CDN-accelerated domain name'''
domain_name = forms.CharField(max_length=64, label=_("Domain Name"), required=True)
source_type = forms.ChoiceField(label=_("Origin Domain Type"),
choices=level_choice,
widget=forms.Select(attrs={'class': 'switchable',
'data-slug': 'origintype'},),
required=True)
origin_config_a = forms.CharField(label=_("IP Address"),
widget=forms.Textarea(attrs={'class': 'switched',
'data-switch-on': 'origintype',
'data-origintype-ip': _("IP Address List"),
}),
required=False)
origin_config_b = forms.CharField(max_length=64, label=_("Origin Domain Name"),
widget=forms.TextInput(attrs={'class': 'switched',
'data-switch-on': 'origintype',
'data-origintype-url': _("Origin Domain Name"),
}),
required=False)
failure_url = 'horizon:cdn:cdn_domain_manager:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
tenant_id = self.request.user.tenant_id
user_name = self.request.user.username
project_name = self.request.user.project_name
domain = Domain.objects.filter(domain_name=data['domain_name'])
            # Check whether the domain name has already been added
if domain:
if domain[0].status != 'deleted':
                    message = _('%s has already been created') % data['domain_name']
messages.warning(request, message)
return True
if domain[0].status == 'deleted':
domainId=domain[0].domain_id
domain_api = DomainManage()
domain_api.enable(domainId=domainId)
domain[0].status = 'inProgress'
domain[0].save()
                    message = _('%s already exists in the database; it will be enabled for you') % data['domain_name']
messages.success(request, message)
return True
else:
                # Attach the billing type
billing_type = CdnBillMethod.objects.get(tenant_id=tenant_id)
domain_name = data['domain_name'].strip()
p = Domain(tenant_id=tenant_id, user_name=user_name, project_name=project_name,
domain_name=domain_name, domain_cname='-', source_type=data['source_type'],
current_type=billing_type.current_type,update_type=billing_type.update_type,
update_at=billing_type.update_at,effect_at=billing_type.effect_at)
p.save()
if data['source_type'] == 'ip':
for i in data['origin_config_a'].strip('\r\n').split('\r\n'):
o = p.sourceaddress_set.create(source_address=i)
o.save()
else:
o = p.sourceaddress_set.create(source_address=data['origin_config_b'])
o.save()
                # Insert an operation log entry
api.logger.Logger(self.request).create(resource_type='CDN', action_name='Create Domain Name',
resource_name='CDN', config=_('Domain: %s') %data['domain_name'],
status='Success')
                # Store the domain_name together with a randomly generated uuid in memcache, as the basis for domain name verification
set_memcache_value(str(data['domain_name']), str(uuid.uuid4()))
message = _('Domain %s was successfully created') % data['domain_name']
messages.success(request, message)
return data['domain_name']
        except Exception:
            # Insert an operation log entry
api.logger.Logger(self.request).create(resource_type='CDN', action_name='Create Domain Name',
resource_name='CDN', config=_('Domain: %s') %data['domain_name'],
status='Error')
            msg = _('Failed to create Domain %s') % data['domain_name']
redirect = self.failure_url
exceptions.handle(request, msg, redirect=redirect)
return False
class VerifyForm(forms.SelfHandlingForm):
redirect_url = reverse_lazy('horizon:cdn:cdn_domain_manager:index')
def __init__(self, request, *args, **kwargs):
super(VerifyForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
return True
class ModifyDomain(CreateForm):
domain_name = forms.CharField(max_length=64, label=_("Domain Name"), required=True,
widget=forms.TextInput(attrs={'readonly': 'readonly'}))
def __init__(self, request, *args, **kwargs):
super(ModifyDomain, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
return True
class CreateAccess(forms.SelfHandlingForm):
failure_url = 'horizon:cdn:cdn_domain_manager:update'
type = forms.CharField(max_length=255, label=_("Type"), required=True)
access_type = forms.ChoiceField(label=_("Access Type"),
choices=access_choice,
widget=forms.Select(attrs={'class': 'switchable',
'data-slug': 'accesstype'},),
required=True)
refer = forms.BooleanField(label=_("refer"), required=False)
black_list = forms.CharField(label=_("Black List"), widget=forms.Textarea(attrs={'class': 'switched',
'data-switch-on': 'accesstype',
'data-accesstype-black': _("Black List"),
}), required=False)
white_list = forms.CharField(label=_("White List"), widget=forms.Textarea(attrs={'class': 'switched',
'data-switch-on': 'accesstype',
'data-accesstype-white': _("White List"),
}), required=False)
forbid_ip = forms.CharField(label=_("Forbid IP"), widget=forms.Textarea(), required=False)
def handle(self, request, data):
return True
class ModifyAccess(CreateAccess):
def __init__(self, request, *args, **kwargs):
super(ModifyAccess, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
return True
class CreateCache(forms.SelfHandlingForm):
failure_url = 'horizon:cdn:cdn_domain_manager:update'
type = forms.CharField(max_length=255, label=_("Type"))
ignore = forms.BooleanField(label=_("ignore"), required=False)
time = forms.IntegerField(label=_("Time"))
def handle(self, request, data):
return True
class ModifyCache(CreateCache):
def __init__(self, request, *args, **kwargs):
super(ModifyCache, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
return True
class ModifyAccountModeForm(forms.SelfHandlingForm):
failure_url = 'horizon:cdn:cdn_domain_manager:index'
account_mode = forms.ChoiceField(label=_("Account Mode"), choices=(('cdnflow', _("Flow Account")),(
'cdnbandwidth',_('Bandwidth Account'))))
def handle(self, request, data):
try:
now_date = datetime.datetime.utcnow()
days = calendar.monthrange(now_date.year,now_date.month)[1]
effect_date = (datetime.date.today().replace(day=1) + datetime.timedelta(days)).replace(day=1)
tenant_id = self.request.user.tenant_id
billing_type = CdnBillMethod.objects.get(tenant_id=tenant_id)
update_type = billing_type.update_type
post_type = data.get('account_mode')
if post_type != update_type:
domain_list = Domain.objects.filter(tenant_id=tenant_id)
for i in domain_list:
i.update_type = post_type
i.update_at = now_date
i.effect_at = effect_date
i.save()
# change billing method
billing_type.update_type = post_type
billing_type.update_at = now_date
billing_type.effect_at = effect_date
billing_type.save()
                message = _('Modified account method successfully')
messages.success(request, message)
else:
                message = _('Your account method is unchanged; nothing to modify')
messages.success(request, message)
return True
        except Exception:
msg = _('Failed to change account method')
LOG.info(msg)
redirect = self.failure_url
exceptions.handle(request, msg, redirect=redirect)
return False
|
horizon/openstack_dashboard/dashboards/cdn/cdn_domain_manager/forms.py
|
| 0.354768 | 0.063599 |
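ModifyAccountModeForm above defers the new billing method to the start of the next month. The date arithmetic is easy to misread, so here is the same calculation in isolation, using only the standard library:

import calendar
import datetime

now_date = datetime.datetime.utcnow()
# Number of days in the current month ...
days = calendar.monthrange(now_date.year, now_date.month)[1]
# ... so adding that many days to the 1st always lands in the next month,
# and the final replace(day=1) pins the effective date to its first day.
effect_date = (datetime.date.today().replace(day=1)
               + datetime.timedelta(days)).replace(day=1)
print(effect_date)  # e.g. 2024-07-01 when run at any time in June 2024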
import pathlib
import typing
from os import environ as env
from typing import Any, Dict, Mapping, TypeVar
from urllib.parse import parse_qs, urlencode
import ipywidgets as w
import weldx
import weldx_widgets
import weldx_widgets.widget_base
import weldx_widgets.widget_factory
from weldx_widgets.translation_utils import _i18n as _
from weldx_widgets.widget_factory import button_layout
__all__ = [
"SaveAndNext",
"build_url",
"get_param_from_env",
"invoke_url",
]
def get_param_from_env(name, default=None) -> str:
"""Extract parameter from env.QUERY_STRING.
Parameters
----------
name :
name of the parameter to extract.
default :
optional default value, if parameter is not set.
Returns
-------
str :
value of the requested parameter.
"""
query_string = env.get("QUERY_STRING", "")
parameters = parse_qs(query_string)
try:
value = parameters[name][0]
except KeyError: # TODO: this can also raise something else, right?
if default:
return default
else:
raise RuntimeError(
f"parameter '{name}' unset and no default provided."
f" Given parameters: {parameters}"
)
return value
def build_url(board: str, parameters: dict = None, invoke=True, out=None) -> str:
"""Build an URL with given parameters.
Parameters
----------
board :
dash board to invoke next. May contain a relative path.
parameters :
optional parameters to encode.
invoke :
should the url be invoked in a web browser?
Returns
-------
str :
the built url.
"""
if invoke and not out:
raise ValueError("need output to invoke Javascript.")
server = env.get("SERVER_NAME", "localhost")
protocol = env.get("SERVER_PROTOCOL", "HTTP")
if "HTTPS" in protocol:
url = "https://"
else:
url = "http://"
url += server
# TODO: this only works from voila!
port = env.get("SERVER_PORT", "8888")
if port:
url += f":{port}/"
else:
url += "/"
voila = "voila" in env.get("SERVER_SOFTWARE", "")
prefix = "voila/render" if voila else ""
url += f"{prefix}/{board}"
if parameters:
params_encoded = urlencode(parameters)
url += f"?{params_encoded}"
if invoke:
invoke_url(url, out)
return url
def invoke_url(url, out):
"""Invoke url in new browser tab.
We cannot use python stdlib webbrowser here, because this code will be executed
    on the server. So we implement this via Javascript.
"""
from IPython.display import Javascript, clear_output, display
with out:
clear_output()
js = Javascript(f'window.open("{url}");')
display(js)
_KeyType = TypeVar("KeyType")
def _deep_update_inplace(
mapping: Dict[_KeyType, Any], *updating_mappings: Dict[_KeyType, Any]
) -> Dict[_KeyType, Any]:
for updating_mapping in updating_mappings:
for k, v in updating_mapping.items():
if k in mapping and isinstance(mapping[k], dict) and isinstance(v, Mapping):
mapping[k] = _deep_update_inplace(mapping[k], v)
else:
mapping[k] = v
return mapping
class SaveAndNext(weldx_widgets.widget_base.WidgetMyVBox):
"""Collect all the data from passed import/output widget list and stores it.
Parameters
----------
filename:
output file name.
next_notebook:
next dashboard/notebook to invoke.
status :
the file update will contain the new status.
collect_data_from :
a list of widgets to build a tree from.
next_notebook_params :
optional parameters for next dashboard.
Notes
-----
The passed status will be set into the wx_user["kisa"]["status"] dict.
"""
def __init__(
self,
filename,
next_notebook: str,
status: str,
collect_data_from: typing.List[weldx_widgets.widget_base.WeldxImportExport],
next_notebook_desc: str = "Invoke next step",
next_notebook_params=None,
title="Save results",
disable_next_button=True,
):
self.status = status
self.collect_data_from = collect_data_from
self.out = w.Output()
if not disable_next_button:
self.btn_next = w.Button(
description=_(next_notebook_desc), layout=button_layout
)
if next_notebook_params is None:
next_notebook_params = dict()
self.next_notebook_params = next_notebook_params
self.next_notebook = next_notebook
self.btn_next.on_click(self.on_next)
self._initial_file = filename # remember initial choice of file.
fn_path = pathlib.Path(filename)
path = str(fn_path.parent)
fn = str(fn_path.name)
self.save_button = weldx_widgets.WidgetSaveButton(
desc="1." + _("Save") if not disable_next_button else _("Save"),
filename=fn,
path=path,
select_default=True,
)
self.save_button.set_handler(self.on_save)
if not disable_next_button:
self.save_button.children += (self.btn_next,)
children = [
weldx_widgets.widget_factory.make_title(title),
self.save_button,
self.out,
]
super(SaveAndNext, self).__init__(children=children)
@property
def filename(self):
"""Return output file name."""
return self.save_button.path
def on_save(self, _):
"""Handle saving data to file."""
from IPython.display import clear_output, display
clear_output()
result = dict()
for widget in self.collect_data_from:
_deep_update_inplace(result, widget.to_tree())
# set status
result["wx_user"] = {"KISA": {"status": self.status}}
def show_header(handle):
with self.out:
clear_output()
display(handle.show_asdf_header(False, True))
# open (existing) file and update it.
if pathlib.Path(self.filename).stem.endswith("_r"):
with self.out:
print("Refusing to save a read-only (template) file!")
print("Please choose another name with the '_r' suffix.")
return
if self.filename != self._initial_file:
# we want to save the previous file under a different name, so load contents
with weldx.WeldxFile(self._initial_file, mode="r") as fh:
_deep_update_inplace(fh, result)
if not pathlib.Path(self.filename).exists():
fh.write_to(self.filename)
show_header(fh)
else:
with weldx.WeldxFile(self.filename, mode="rw") as fh2:
_deep_update_inplace(fh2, fh)
show_header(fh2)
else:
with weldx.WeldxFile(self.filename, mode="rw", sync=True) as fh:
_deep_update_inplace(fh, result)
show_header(fh)
def on_next(self, _):
"""Invoke next notebook."""
build_url(
board=self.next_notebook,
parameters=dict(file=self.filename, **self.next_notebook_params),
invoke=True,
out=self.out,
)
|
weldx_widgets/kisa/save.py
|
| 0.507812 | 0.173989 |
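A small sketch of build_url above, assuming the module is importable as weldx_widgets.kisa.save and run outside voila with no SERVER_* environment variables set, so the localhost defaults apply; the board name and file parameter are made up, and invoke=False skips the Javascript redirect so no output widget is needed:

from weldx_widgets.kisa.save import build_url

url = build_url("02_groove.ipynb", {"file": "single_pass_weld.wx"}, invoke=False)
print(url)  # http://localhost:8888//02_groove.ipynb?file=single_pass_weld.wx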
from datetime import datetime
from json import load
from requests import get, head, post, put
from urllib.parse import quote_plus
from uuid import uuid4
class DaoElastic(object):
_INDEX = 'unfact'
_TYPE = 'news'
_MAX_FETCH_SIZE = 300
_BASE_HOST = 'http://127.0.0.1:9200'
_MAPPING_FILE = '../resources/mapping.json'
@staticmethod
def _assert_response(response):
assert response.status_code in [200, 201], \
'Unexpected response [%d]: [%s]' % (
response.status_code, response.json())
return response
def __init__(self, base_host=None):
self._base_host = self._BASE_HOST if base_host is None else base_host
self._base_index = self._base_host + '/' + self._INDEX
self._base_url = self._base_index + '/' + self._TYPE
self._init_schema()
def _init_schema(self):
"""Sets up the index schema."""
response = head('%s/_mapping/%s' % (self._base_index, self._TYPE))
if response.status_code == 404:
print('Index not found, creating mapping.')
with open(self._MAPPING_FILE) as file:
json = load(file)
response = put(self._base_index, json=json)
self._assert_response(response)
elif response.status_code != 200:
raise ValueError('Connection error to [%s]: [%r]' % (
self._base_url, response.text))
def save_new_link(
self, *, short_url, full_url, domain, skip, newsletter_date):
"""
Arguments:
short_url (str)
full_url (str)
domain (str)
skip (boolean)
newsletter_date (datetime)
"""
assert short_url is not None and len(short_url) > 0
assert full_url is not None and len(full_url) > 0
assert skip is not None
assert newsletter_date is not None
date_str = newsletter_date.strftime('%Y-%m-%d')
news_id = str(uuid4()).replace('-', '')
url = '%s/%s/_create' % (self._base_url, news_id)
response = post(url, json={'id': news_id,
'short_url': short_url,
'full_url': full_url,
'domain': domain,
'skip': skip,
'newsletter_date': date_str})
self._assert_response(response)
def exists_short_url(self, *, short_url):
assert short_url is not None
url = '%s/_search' % self._base_url
query = {'query': {'constant_score': {'filter': {
'term': {'short_url': short_url}}}}}
response = get(url, json=query)
self._assert_response(response)
return response.json()['hits']['total'] > 0
def exists_full_url(self, *, full_url):
assert full_url is not None
url = '%s/_search' % self._base_url
query = {'query': {'constant_score': {'filter': {
'term': {'full_url': full_url}}}}}
response = get(url, json=query)
self._assert_response(response)
return response.json()['hits']['total'] > 0
def save_text_analysis(self, news, text_original, authors, text_en,
translator, language, sentiment_score,
sentiment_magnitude, entities, extractor):
"""
Arguments:
news (dict): The complete news object
text_original (str)
authors (str): Comma separated list of authors
text_en (str)
translator (str)
language (str)
sentiment_score (str)
sentiment_magnitude (str)
entities (list of obj)
extractor (str)
"""
assert news['short_url'] is not None and len(news['short_url']) > 0
assert news['id'] is not None and len(news['id']) > 0
assert text_original is not None and len(text_original) > 0
assert text_en is not None and len(text_en) > 0
assert language is not None and len(language) > 0
assert entities, 'Missing entities'
entities_dict = [{'name': entity.name,
'type': entity.entity_type,
'salience': entity.salience,
'wikipedia_url': entity.wikipedia_url}
for entity in entities]
news['text_original'] = text_original
news['authors'] = authors
news['text_en'] = text_en
news['translator'] = translator
news['language'] = language
news['sentiment_score'] = sentiment_score
news['sentiment_magnitude'] = sentiment_magnitude
news['entities'] = entities_dict
news['extractor'] = extractor
url = '%s/%s' % (self._base_url, news['id'])
response = put(url, json=news)
self._assert_response(response)
def save_error(self, *, news, error_message, error_class):
"""
Arguments:
news (dict): The complete news object
error_message (str)
error_class (str)
"""
assert news['short_url'] is not None and len(news['short_url']) > 0
assert news['id'] is not None and len(news['id']) > 0
if 'text_analysed' in news:
del news['text_analysed']
news['error_message'] = error_message
news['error_class'] = error_class
url = '%s/%s' % (self._base_url, news['id'])
response = put(url, json=news)
self._assert_response(response)
def import_news(self, news):
news['id'] = str(news.pop('_id'))
if 'tokens' in news:
del news['tokens']
if 'sentences' in news:
del news['sentences']
url = '%s/%s/_create' % (self._base_url, news['id'])
response = put(url, json=news)
if response.status_code == 409:
            print('Document [%s] was already present.' % news['id'])
return
else:
self._assert_response(response)
def find_for_text_analysis(self, include_errors=False):
must_not = [{'term': {'skip': 'true'}},
{'term': {'text_analysed': 'true'}}]
if not include_errors:
must_not.append({'exists': {'field': 'error_class'}})
query = {'size': self._MAX_FETCH_SIZE,
'query':
{'constant_score': {'filter': {'bool': {
'must_not': must_not}}}}}
response = get('%s/_search' % self._base_url, json=query)
data = self._assert_response(response).json()
if data['hits']['total'] > 0:
for hit in data['hits']['hits']:
yield hit['_source']
else:
return []
python/scripts/dao_elastic.py
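A minimal usage sketch for the class above (hypothetical URLs and date; assumes an Elasticsearch instance is reachable at the default host and that the referenced mapping file exists). Only methods defined on DaoElastic are used:

from datetime import datetime
# from dao_elastic import DaoElastic  # when used outside this module
dao = DaoElastic()
if not dao.exists_short_url(short_url='http://example.com/s/abc'):
    dao.save_new_link(
        short_url='http://example.com/s/abc',        # hypothetical values
        full_url='http://example.com/full-article',
        domain='example.com',
        skip=False,
        newsletter_date=datetime(2017, 1, 1),
    )
for news in dao.find_for_text_analysis():
    print(news['full_url'])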
number_string = """08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"""
# Convert the block of numbers above into a two-dimensional list of integers.
# The outer comprehension iterates over rows and the inner one over columns, so
# the result is indexed as int_matrix[y][x] rather than the more intuitive
# int_matrix[x][y].
int_matrix = [[int(cell) for cell in row.split(" ")] for row in number_string.split("\n")]
def get_cell(x, y):
if (0 <= x <= 19 and 0 <= y <= 19):
# reversed coordinate axis (use y,x instead of x,y) due to parsing
return int_matrix[y][x]
    else:
        # Out-of-range coordinates return 0 so that any product extending past
        # the edge of the grid evaluates to zero and is ignored by max().
        return 0
def check_vertical(x, y):
return get_cell(x,y) * get_cell(x,y+1) * get_cell(x,y+2) * get_cell(x,y+3)
def check_horizontal(x, y):
return get_cell(x,y) * get_cell(x+1,y) * get_cell(x+2,y) * get_cell(x+3,y)
# south west (sw) to north east (ne)
def check_nw_se_diagonal(x, y):
return get_cell(x,y) * get_cell(x+1,y+1) * get_cell(x+2,y+2) * get_cell(x+3,y+3)
# north east (ne) to south west (sw)
def check_ne_sw_diagonal(x, y):
return get_cell(x,y) * get_cell(x-1,y+1) * get_cell(x-2,y+2) * get_cell(x-3,y+3)
def get_highest_cell_product(x, y):
return max(check_vertical(x, y), check_horizontal(x, y), check_nw_se_diagonal(x, y), check_ne_sw_diagonal(x, y))
# Print the parsed grid as a sanity check.
for y in range(20):
    for x in range(20):
        print(str(get_cell(x, y)).zfill(2), end=" ")
    print()
greatest_cell_product = 0
for y in range(20):
    for x in range(20):
        cell_product = get_highest_cell_product(x, y)
        if cell_product > greatest_cell_product:
            greatest_cell_product = cell_product
print("greatest_product ==", greatest_cell_product)
python/problem11.py
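A quick illustrative check of the diagonal helper against values visible in the grid above: the run 26, 63, 78, 14 starting at x=8, y=6 multiplies to 1788696.

assert check_nw_se_diagonal(8, 6) == 26 * 63 * 78 * 14  # = 1788696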
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from six.moves import xrange
import logging
import numpy as np
from keras import backend as K
from keras import optimizers
from keras import objectives
from keras.layers import Input, Concatenate, MaxPooling1D
from keras.models import Model, load_model, model_from_json
from .. import objectives as hyp_obj
from ..keras_utils import *
from ..layers import *
from ..losses import categorical_mbr
from ...hyp_model import HypModel
class SeqEmbed(HypModel):
def __init__(self, enc_net, pt_net,
loss='categorical_crossentropy',
pooling='mean+std',
left_context=0,
right_context=0,
begin_context=None,
end_context=None,
enc_downsampling=None,
**kwargs):
super(SeqEmbed, self).__init__(**kwargs)
self.enc_net = enc_net
self.pt_net = pt_net
self.pooling = pooling
self.loss = loss
self.model = None
self.pool_net = None
self.left_context = left_context
self.right_context = right_context
self.begin_context = left_context if begin_context is None else begin_context
self.end_context = right_context if end_context is None else end_context
self._enc_downsampling = enc_downsampling
self.max_seq_length = None
@property
def x_dim(self):
return self.enc_net.get_input_shape_at(0)[-1]
@property
def num_classes(self):
return self.pt_net.get_output_shape_at(0)[-1]
@property
def pool_in_dim(self):
return self.enc_net.get_output_shape_at(0)[-1]
@property
def pool_out_dim(self):
return self.pt_net.get_input_shape_at(0)[-1]
@property
def in_length(self):
if self.max_seq_length is None:
return self.enc_net.get_input_shape_at(0)[-2]
return self.max_seq_length
@property
def pool_in_length(self):
pool_length = self.enc_net.get_output_shape_at(0)[-2]
if pool_length is None:
in_length = self.in_length
if in_length is None:
return None
x = Input(shape=(in_length, self.x_dim))
net = Model(x, self.enc_net(x))
pool_length = net.get_output_shape_at(0)[-2]
return pool_length
@property
def enc_downsampling(self):
if self._enc_downsampling is None:
assert self.in_length is not None
assert self.pool_in_length is not None
r = self.in_length/self.pool_in_length
assert np.ceil(r) == np.floor(r)
self._enc_downsampling = int(r)
return self._enc_downsampling
def _apply_pooling(self, x, mask):
if self.pooling == 'mean+std':
pool = Concatenate(axis=-1, name='pooling')(
GlobalWeightedMeanStdPooling1D(name='mean--std')([x, mask]))
elif self.pooling == 'mean+logvar':
pool = Concatenate(axis=-1, name='pooling')(
GlobalWeightedMeanLogVarPooling1D(name='mean--logvar')([x, mask]))
elif self.pooling == 'mean':
pool = GlobalWeightedAveragePooling1D(name='pooling')([x, mask])
else:
raise ValueError('Invalid pooling %s' % self.pooling)
return pool
def compile(self, metrics=None, **kwargs):
if self.loss == 'categorical_mbr':
loss = categorical_mbr
else:
loss = self.loss
if metrics is None:
self.model.compile(loss=loss, **kwargs)
else:
self.model.compile(loss=loss,
metrics=metrics,
weighted_metrics=metrics, **kwargs)
def freeze_enc_net(self):
self.enc_net.trainable = False
def freeze_enc_net_layers(self, layers):
for layer_name in layers:
self.enc_net.get_layer(layer_name).trainable = False
def freeze_pt_net_layers(self, layers):
for layer_name in layers:
self.pt_net.get_layer(layer_name).trainable = False
def build(self, max_seq_length=None):
if max_seq_length is None:
max_seq_length = self.enc_net.get_input_shape_at(0)[-2]
self.max_seq_length = max_seq_length
x = Input(shape=(max_seq_length, self.x_dim,))
mask = CreateMask(0)(x)
frame_embed = self.enc_net(x)
dec_ratio = int(max_seq_length/frame_embed._keras_shape[1])
if dec_ratio > 1:
mask = MaxPooling1D(dec_ratio, padding='same')(mask)
pool = self._apply_pooling(frame_embed, mask)
y = self.pt_net(pool)
self.model = Model(x, y)
self.model.summary()
def build_embed(self, layers):
frame_embed = Input(shape=(None, self.pool_in_dim,))
mask = Input(shape=(None,))
pool = self._apply_pooling(frame_embed, mask)
outputs = []
for layer_name in layers:
embed_i = Model(self.pt_net.get_input_at(0),
self.pt_net.get_layer(layer_name).get_output_at(0))(pool)
outputs.append(embed_i)
self.pool_net = Model([frame_embed, mask], outputs)
self.pool_net.summary()
def predict_embed(self, x, **kwargs):
in_seq_length = self.in_length
pool_seq_length = self.pool_in_length
r = self.enc_downsampling
assert np.ceil(self.left_context/r) == np.floor(self.left_context/r)
assert np.ceil(self.right_context/r) == np.floor(self.right_context/r)
assert np.ceil(self.begin_context/r) == np.floor(self.begin_context/r)
assert np.ceil(self.end_context/r) == np.floor(self.end_context/r)
pool_begin_context = int(self.begin_context/r)
pool_end_context = int(self.end_context/r)
pool_left_context = int(self.left_context/r)
pool_right_context = int(self.right_context/r)
in_length = x.shape[-2]
pool_length = int(in_length/r)
in_shift = in_seq_length - self.left_context - self.right_context
pool_shift = int(in_shift/r)
y = np.zeros((pool_length, self.pool_in_dim), dtype=float_keras())
mask = np.ones((1, pool_length), dtype=float_keras())
mask[0,:pool_begin_context] = 0
mask[0,pool_length - pool_end_context:] = 0
num_batches = max(int(np.ceil((in_length-in_seq_length)/in_shift+1)), 1)
x_i = np.zeros((1,in_seq_length, x.shape[-1]), dtype=float_keras())
j_in = 0
j_out = 0
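        # Slide a window of in_seq_length input frames over the sequence with
        # stride in_shift (window length minus left/right context); each window
        # is encoded separately and its frame-level outputs are stitched into y
        # at the downsampled rate r.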
for i in xrange(num_batches):
k_in = min(j_in+in_seq_length, in_length)
k_out = min(j_out+pool_seq_length, pool_length)
l_in = k_in - j_in
l_out = k_out - j_out
x_i[0,:l_in] = x[j_in:k_in]
y_i = self.enc_net.predict(x_i, batch_size=1, **kwargs)[0]
y[j_out:k_out] = y_i[:l_out]
j_in += in_shift
j_out += pool_shift
if i==0:
j_out += pool_left_context
logging.debug(pool_seq_length)
logging.debug(pool_left_context)
logging.debug(pool_right_context)
logging.debug(pool_begin_context)
logging.debug(pool_end_context)
logging.debug('embed2 %d %d %d' % (pool_length, j_out-pool_shift, j_out-pool_shift+l_out))
y = np.expand_dims(y, axis=0)
embeds = self.pool_net.predict([y, mask], batch_size=1, **kwargs)
return np.hstack(tuple(embeds))
@property
def embed_dim(self):
if self.pool_net is None:
return None
embed_dim=0
for node in xrange(len(self.pool_net._inbound_nodes)):
output_shape = self.pool_net.get_output_shape_at(node)
if isinstance(output_shape, list):
for shape in output_shape:
embed_dim += shape[-1]
else:
embed_dim += output_shape[-1]
return embed_dim
def build_eval(self):
frame_embed = Input(shape=(None, self.pool_in_dim,))
mask = Input(shape=(None,))
pool = self._apply_pooling(frame_embed, mask)
score = self.pt_net(pool)
self.pool_net = Model([frame_embed, mask], score)
self.pool_net.summary()
def predict_eval(self, x, **kwargs):
return np.log(self.predict_embed(x, **kwargs)+1e-10)
def fit(self, x, y, **kwargs):
self.model.fit(x, y, **kwargs)
def fit_generator(self, generator, steps_per_epoch, **kwargs):
self.model.fit_generator(generator, steps_per_epoch, **kwargs)
def get_config(self):
config = { 'pooling': self.pooling,
'loss': self.loss,
'left_context': self.left_context,
'right_context': self.right_context,
'begin_context': self.begin_context,
'end_context': self.end_context}
base_config = super(SeqEmbed, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def save(self, file_path):
file_model = '%s.json' % (file_path)
with open(file_model, 'w') as f:
f.write(self.to_json())
file_model = '%s.enc.h5' % (file_path)
self.enc_net.save(file_model)
file_model = '%s.pt.h5' % (file_path)
self.pt_net.save(file_model)
@classmethod
def load(cls, file_path):
file_config = '%s.json' % (file_path)
config = SeqEmbed.load_config(file_config)
file_model = '%s.enc.h5' % (file_path)
enc_net = load_model(file_model, custom_objects=get_keras_custom_obj())
file_model = '%s.pt.h5' % (file_path)
pt_net = load_model(file_model, custom_objects=get_keras_custom_obj())
filter_args = ('loss', 'pooling',
'left_context', 'right_context',
'begin_context', 'end_context', 'name')
kwargs = {k: config[k] for k in filter_args if k in config }
return cls(enc_net, pt_net, **kwargs)
@staticmethod
def filter_args(prefix=None, **kwargs):
if prefix is None:
p = ''
else:
p = prefix + '_'
valid_args = ('pooling', 'left_context', 'right_context',
'begin_context', 'end_context')
return dict((k, kwargs[p+k])
for k in valid_args if p+k in kwargs)
@staticmethod
def add_argparse_args(parser, prefix=None):
if prefix is None:
p1 = '--'
p2 = ''
else:
p1 = '--' + prefix + '-'
p2 = prefix + '_'
parser.add_argument(p1+'pooling', dest=p2+'pooling', default='mean+std',
choices=['mean+std', 'mean+logvar', 'mean'])
parser.add_argument(p1+'left-context', dest=(p2+'left_context'),
default=0, type=int)
parser.add_argument(p1+'right-context', dest=(p2+'right_context'),
default=0, type=int)
parser.add_argument(p1+'begin-context', dest=(p2+'begin_context'),
default=None, type=int)
parser.add_argument(p1+'end-context', dest=(p2+'end_context'),
default=None, type=int)
hyperion/keras/embed/seq_embed.py
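The 'mean+std' pooling used in _apply_pooling above concatenates a mask-weighted mean and standard deviation over the time axis. A self-contained NumPy sketch of that computation (illustrative only; the model itself relies on the custom GlobalWeighted* Keras layers imported at the top of the file):

import numpy as np

def masked_mean_std_pool(x, mask, eps=1e-9):
    # x: (batch, time, feat); mask: (batch, time), 1.0 for valid frames, 0.0 otherwise
    w = mask[:, :, None]
    n = w.sum(axis=1) + eps
    mean = (w * x).sum(axis=1) / n
    var = (w * (x - mean[:, None, :]) ** 2).sum(axis=1) / n
    return np.concatenate([mean, np.sqrt(var + eps)], axis=-1)

x = np.random.randn(2, 10, 4).astype('float32')
mask = np.ones((2, 10), dtype='float32')
mask[1, 7:] = 0.0                              # second sequence has only 7 valid frames
print(masked_mean_std_pool(x, mask).shape)     # (2, 8): mean and std concatenated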
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import utility
from utils.loss import discriminator
class Adversarial(nn.Module):
def __init__(self, args, gan_type):
super(Adversarial, self).__init__()
self.gan_type = gan_type
self.gan_k = args.gan_k
self.discriminator = discriminator.Discriminator(args, gan_type)
if gan_type != 'WGAN_GP':
self.optimizer = utility.make_optimizer(args, self.discriminator)
else:
self.optimizer = optim.Adam(
self.discriminator.parameters(),
betas=(0, 0.9), eps=1e-8, lr=1e-5
)
self.scheduler = utility.make_scheduler(args, self.optimizer)
def forward(self, fake, real):
fake_detach = fake.detach()
self.loss = 0
for _ in range(self.gan_k):
self.optimizer.zero_grad()
d_fake = self.discriminator(fake_detach)
d_real = self.discriminator(real)
if self.gan_type == 'GAN':
label_fake = torch.zeros_like(d_fake)
label_real = torch.ones_like(d_real)
loss_d \
= F.binary_cross_entropy_with_logits(d_fake, label_fake) \
+ F.binary_cross_entropy_with_logits(d_real, label_real)
elif self.gan_type.find('WGAN') >= 0:
loss_d = (d_fake - d_real).mean()
if self.gan_type.find('GP') >= 0:
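                    # WGAN-GP: evaluate the critic on random per-sample
                    # interpolations between real and fake images and penalize
                    # deviations of the gradient norm from 1 (coefficient 10).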
                    epsilon = torch.rand(fake.size(0), 1, 1, 1, device=fake.device, dtype=fake.dtype)
hat = fake_detach.mul(1 - epsilon) + real.mul(epsilon)
hat.requires_grad = True
d_hat = self.discriminator(hat)
gradients = torch.autograd.grad(
outputs=d_hat.sum(), inputs=hat,
retain_graph=True, create_graph=True, only_inputs=True
)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_norm = gradients.norm(2, dim=1)
gradient_penalty = 10 * gradient_norm.sub(1).pow(2).mean()
loss_d += gradient_penalty
# Discriminator update
self.loss += loss_d.item()
loss_d.backward()
self.optimizer.step()
if self.gan_type == 'WGAN':
for p in self.discriminator.parameters():
p.data.clamp_(-1, 1)
self.loss /= self.gan_k
d_fake_for_g = self.discriminator(fake)
if self.gan_type == 'GAN':
loss_g = F.binary_cross_entropy_with_logits(
d_fake_for_g, label_real
)
elif self.gan_type.find('WGAN') >= 0:
loss_g = -d_fake_for_g.mean()
# Generator loss
return loss_g
def state_dict(self, *args, **kwargs):
state_discriminator = self.discriminator.state_dict(*args, **kwargs)
state_optimizer = self.optimizer.state_dict()
return dict(**state_discriminator, **state_optimizer)
# Some references
# https://github.com/kuc2477/pytorch-wgan-gp/blob/master/model.py
# OR
# https://github.com/caogang/wgan-gp/blob/master/gan_cifar10.py
utils/loss/adversarial.py
from glue.viewers.image.composite_array import CompositeArray
from bqplot_image_gl.viewlistener import ViewListener
from ...link import on_change
from ..common.viewer import BqplotBaseView
from ..scatter.layer_artist import BqplotScatterLayerArtist
from .layer_artist import BqplotImageLayerArtist, BqplotImageSubsetLayerArtist
from .frb_mark import FRBImage
from glue_jupyter.bqplot.image.state import BqplotImageViewerState
from glue_jupyter.common.state_widgets.layer_scatter import ScatterLayerStateWidget
from glue_jupyter.common.state_widgets.layer_image import (ImageLayerStateWidget,
ImageSubsetLayerStateWidget)
from glue_jupyter.common.state_widgets.viewer_image import ImageViewerStateWidget
__all__ = ['BqplotImageView']
class BqplotImageView(BqplotBaseView):
allow_duplicate_data = False
allow_duplicate_subset = False
large_data_size = 2e7
_layer_style_widget_cls = {BqplotImageLayerArtist: ImageLayerStateWidget,
BqplotImageSubsetLayerArtist: ImageSubsetLayerStateWidget,
BqplotScatterLayerArtist: ScatterLayerStateWidget}
_state_cls = BqplotImageViewerState
_options_cls = ImageViewerStateWidget
tools = ['bqplot:home', 'bqplot:panzoom', 'bqplot:rectangle', 'bqplot:circle']
def __init__(self, session):
super(BqplotImageView, self).__init__(session)
self.shape = None
self._composite = CompositeArray()
self._composite_image = FRBImage(self, self._composite)
self.figure.marks = list(self.figure.marks) + [self._composite_image]
self.state.add_callback('reference_data', self._reset_limits)
self.state.add_callback('x_att', self._reset_limits)
self.state.add_callback('y_att', self._reset_limits)
self._setup_view_listener()
on_change([(self.state, 'aspect')])(self._sync_figure_aspect)
self._sync_figure_aspect()
def _setup_view_listener(self):
self._vl = ViewListener(widget=self.figure,
css_selector=".plotarea_events")
self._vl.observe(self._on_view_change, names=['view_data'])
def _reset_limits(self, *args):
self.state.reset_limits()
def _on_view_change(self, *args):
views = sorted(self._vl.view_data)
if len(views) > 0:
first_view = self._vl.view_data[views[0]]
self.shape = (int(first_view['height']), int(first_view['width']))
self._composite_image.update()
else:
self.shape = None
self._sync_figure_aspect()
def _sync_figure_aspect(self, *args, **kwargs):
with self.figure.hold_trait_notifications():
if self.state.aspect == 'equal':
if self.shape is None:
axes_ratio = None
else:
height, width = self._composite_image.shape
axes_ratio = height / width
else:
axes_ratio = None
self.state._set_axes_aspect_ratio(axes_ratio)
def get_data_layer_artist(self, layer=None, layer_state=None):
if layer.ndim == 1:
cls = BqplotScatterLayerArtist
else:
cls = BqplotImageLayerArtist
return self.get_layer_artist(cls, layer=layer, layer_state=layer_state)
def get_subset_layer_artist(self, layer=None, layer_state=None):
if layer.ndim == 1:
cls = BqplotScatterLayerArtist
else:
cls = BqplotImageSubsetLayerArtist
return self.get_layer_artist(cls, layer=layer, layer_state=layer_state)
glue_jupyter/bqplot/image/viewer.py
"""Command for creating target HTTP proxies."""
from googlecloudapis.compute.v1 import compute_v1_messages as messages
from googlecloudsdk.compute.lib import base_classes
class Create(base_classes.BaseAsyncMutator):
"""Create a target HTTP proxy."""
@staticmethod
def Args(parser):
parser.add_argument(
'--description',
help='An optional, textual description for the target HTTP proxy.')
url_map = parser.add_argument(
'--url-map',
required=True,
help=('A reference to a URL map resource that defines the mapping of '
'URLs to backend services.'))
url_map.detailed_help = """\
A reference to a URL map resource that defines the mapping of
URLs to backend services. The URL map must exist and cannot be
deleted while referenced by a target HTTP proxy.
"""
parser.add_argument(
'name',
help='The name of the target HTTP proxy.')
@property
def service(self):
return self.context['compute'].targetHttpProxies
@property
def method(self):
return 'Insert'
@property
def print_resource_type(self):
return 'targetHttpProxies'
def CreateRequests(self, args):
url_map_uri = self.context['uri-builder'].Build(
'global', 'urlMaps', args.url_map)
request = messages.ComputeTargetHttpProxiesInsertRequest(
project=self.context['project'],
targetHttpProxy=messages.TargetHttpProxy(
description=args.description,
name=args.name,
urlMap=url_map_uri))
return [request]
Create.detailed_help = {
'brief': 'Create a target HTTP proxy',
'DESCRIPTION': """
*{command}* is used to create target HTTP proxies. A target
HTTP proxy is referenced by one or more forwarding rules which
define which packets the proxy is responsible for routing. The
target HTTP proxy points to a URL map that defines the rules
for routing the requests. The URL map's job is to map URLs to
backend services which handle the actual requests.
""",
}
lib/googlecloudsdk/compute/subcommands/target_http_proxies/create.py
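For reference, a hypothetical invocation matching the arguments declared above (the exact flag surface depends on the Cloud SDK release):
    gcloud compute target-http-proxies create my-proxy --url-map my-url-map --description 'Example proxy'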
from __future__ import print_function
import time
import numpy as np
import sympy as sy
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.glyphs import Line
from bokeh.objects import Plot, DataRange1d, LinearAxis, ColumnDataSource, Grid, Legend
from bokeh.session import Session
from bokeh.widgets import Slider, TextInput, HBox, VBox, Dialog
from requests.exceptions import ConnectionError
document = Document()
session = Session()
session.use_doc('taylor_server')
session.load_document(document)
xs = sy.Symbol('x')
expr = sy.exp(-xs)*sy.sin(xs)
order = 1
def taylor(fx, xs, order, x_range=(0, 1), n=200):
x0, x1 = x_range
x = np.linspace(float(x0), float(x1), n)
fy = sy.lambdify(xs, fx, modules=['numpy'])(x)
tx = fx.series(xs, n=order).removeO()
if tx.is_Number:
ty = np.zeros_like(x)
ty.fill(float(tx))
else:
ty = sy.lambdify(xs, tx, modules=['numpy'])(x)
return x, fy, ty
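# For example, with fx = sy.sin(xs) and order = 4, fx.series(xs, n=4).removeO()
# evaluates to x - x**3/6, so ty holds the cubic Taylor approximation of sin
# sampled on the same grid as fy.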
def update_data():
x, fy, ty = taylor(expr, xs, order, (-2*sy.pi, 2*sy.pi), 200)
plot.title = "%s vs. taylor(%s, n=%d)" % (expr, expr, order)
legend.legends = {
"%s" % expr: [line_f_glyph],
"taylor(%s)" % expr: [line_t_glyph],
}
source.data = dict(x=x, fy=fy, ty=ty)
slider.value = order
session.store_document(document)
source = ColumnDataSource(data=dict(x=[], fy=[], ty=[]))
xdr = DataRange1d(sources=[source.columns("x")])
ydr = DataRange1d(sources=[source.columns("fy")])
plot = Plot(x_range=xdr, y_range=ydr, plot_width=800, plot_height=400)
line_f = Line(x="x", y="fy", line_color="blue", line_width=2)
line_f_glyph = plot.add_glyph(source, line_f)
plot.add_layout(line_f_glyph)
line_t = Line(x="x", y="ty", line_color="red", line_width=2)
line_t_glyph = plot.add_glyph(source, line_t)
plot.add_layout(line_t_glyph)
xaxis = LinearAxis()
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis()
plot.add_layout(yaxis, 'left')
xgrid = Grid(dimension=0, ticker=xaxis.ticker)
plot.add_layout(xgrid)
ygrid = Grid(dimension=1, ticker=yaxis.ticker)
plot.add_layout(ygrid)
legend = Legend(orientation="bottom_left")
plot.add_layout(legend)
def on_slider_value_change(obj, attr, old, new):
global order
order = int(new)
update_data()
def on_text_value_change(obj, attr, old, new):
try:
global expr
expr = sy.sympify(new, dict(x=xs))
except (sy.SympifyError, TypeError, ValueError) as exception:
dialog.content = str(exception)
dialog.visible = True
session.store_objects(dialog)
else:
update_data()
dialog = Dialog(title="Invalid expression", buttons=["Close"])
slider = Slider(start=1, end=20, value=order, step=1, title="Order:")
slider.on_change('value', on_slider_value_change)
text = TextInput(value=str(expr), title="Expression:")
text.on_change('value', on_text_value_change)
inputs = HBox(children=[slider, text])
layout = VBox(children=[inputs, plot, dialog])
document.add(layout)
update_data()
if __name__ == "__main__":
link = session.object_link(document.context)
print("Please visit %s to see the plots" % link)
    view(link)
print("\npress ctrl-C to exit")
try:
while True:
session.load_document(document)
time.sleep(0.5)
except KeyboardInterrupt:
print()
except ConnectionError:
print("Connection to bokeh-server was terminated")
examples/glyphs/taylor_server.py
import io
import sys
import pyxb
import pyxb.binding
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import pyxb.binding.saxer
import pyxb.utils.domutils
import pyxb.utils.six
import pyxb.utils.utility
from . import dataoneTypes_v1 as _ImportedBinding_dataoneTypes_v1
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier(
'urn:uuid:c90f2764-b359-11e7-b444-080027018ba0'
)
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.6'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI(
'http://ns.dataone.org/service/types/v1.1', create_if_missing=True
)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument(xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(
fallback_namespace=default_namespace, location_base=location_base
)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, pyxb.utils.six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM(node, default_namespace=None):
"""Create a Python instance from the given DOM node. The node tag must correspond to
an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}.
"""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
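# Minimal usage sketch (hypothetical file name; assumes the XML document's root
# element is declared in this schema):
#
#   with open('query_engine_description.xml', 'rb') as f:
#       qed = CreateFromDocument(f.read())
#   print(qed.name, qed.queryEngineVersion)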
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryEngineDescription with content type ELEMENT_ONLY
class QueryEngineDescription(pyxb.binding.basis.complexTypeDefinition):
"""Describes a query engine that can be used to search content on the node.
Query engines may be general purpose or specialized for particular communities or
domains.
"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryEngineDescription')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
72,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element queryEngineVersion uses Python identifier queryEngineVersion
__queryEngineVersion = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion'),
'queryEngineVersion',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_queryEngineVersion',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
queryEngineVersion = property(
__queryEngineVersion.value,
__queryEngineVersion.set,
None,
'The version of the underlying query engine. Used by clients to determine possible\n compatibility concerns or features available.',
)
# Element querySchemaVersion uses Python identifier querySchemaVersion
__querySchemaVersion = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion'),
'querySchemaVersion',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_querySchemaVersion',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
querySchemaVersion = property(
__querySchemaVersion.value,
__querySchemaVersion.set,
None,
'Version of the schema in use by the query engine, e.g. "1.0.1"',
)
# Element name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'name'),
'name',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_name',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
name = property(
__name.value,
__name.set,
None,
'The full, human readable name of the query engine. For example: \n "Apache SOLR"',
)
# Element additionalInfo uses Python identifier additionalInfo
__additionalInfo = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'additionalInfo'),
'additionalInfo',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_additionalInfo',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
additionalInfo = property(
__additionalInfo.value,
__additionalInfo.set,
None,
        'An optional human readable description of the query engine. This can be \n used to describe any special capabilities or intended uses for the query engine. For example, \n a query engine may be tuned to suit a particular audience or domain as opposed to providing \n a general purpose discovery mechanism. This field may also contain links to additional information about the query engine, \n such as documentation for the search syntax provided by the query engine implementors.',
)
# Element queryField uses Python identifier queryField
__queryField = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryField'),
'queryField',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_queryField',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
queryField = property(
__queryField.value,
__queryField.set,
None,
'A list of query fields supported by the query engine.',
)
_ElementMap.update(
{
__queryEngineVersion.name(): __queryEngineVersion,
__querySchemaVersion.name(): __querySchemaVersion,
__name.name(): __name,
__additionalInfo.name(): __additionalInfo,
__queryField.name(): __queryField,
}
)
_AttributeMap.update({})
_module_typeBindings.QueryEngineDescription = QueryEngineDescription
Namespace.addCategoryObject(
'typeBinding', 'QueryEngineDescription', QueryEngineDescription
)
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryEngineList with content type ELEMENT_ONLY
class QueryEngineList(pyxb.binding.basis.complexTypeDefinition):
"""A list of query engine names that indicate the possible values for
CNRead.getQueryEngineDescription and CNRead.query REST API endpoints."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryEngineList')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
114,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element queryEngine uses Python identifier queryEngine
__queryEngine = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryEngine'),
'queryEngine',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineList_queryEngine',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
queryEngine = property(
__queryEngine.value,
__queryEngine.set,
None,
'The name of a queryEngine. This value will be used as a path element in \n REST API calls and so should not contain characters that will need to be escaped.',
)
_ElementMap.update({__queryEngine.name(): __queryEngine})
_AttributeMap.update({})
_module_typeBindings.QueryEngineList = QueryEngineList
Namespace.addCategoryObject('typeBinding', 'QueryEngineList', QueryEngineList)
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryField with content type ELEMENT_ONLY
class QueryField(pyxb.binding.basis.complexTypeDefinition):
""""""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryField')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
131,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'name'),
'name',
'__httpns_dataone_orgservicetypesv1_1_QueryField_name',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
name = property(
__name.value,
__name.set,
None,
        'The name of the field as used programmatically when \n constructing queries or other references to the field.',
)
# Element description uses Python identifier description
__description = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'description'),
'description',
'__httpns_dataone_orgservicetypesv1_1_QueryField_description',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
description = property(
__description.value,
__description.set,
None,
        'An optional, repeatable, brief description of the field that can be\n used to help guide developers or end users in appropriate use of the field. May, for \n example, contain links to additional documentation.',
)
# Element type uses Python identifier type
__type = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'type'),
'type',
'__httpns_dataone_orgservicetypesv1_1_QueryField_type',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
type = property(
__type.value,
__type.set,
None,
'The type of the field, expressed in the language peculiar to the \n query engine being described.',
)
# Element searchable uses Python identifier searchable
__searchable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'searchable'),
'searchable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_searchable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
searchable = property(
__searchable.value,
__searchable.set,
None,
'Indicates if the field may be used in constructing queries (as opposed \n to only appearing in results)',
)
# Element returnable uses Python identifier returnable
__returnable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'returnable'),
'returnable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_returnable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
returnable = property(
__returnable.value,
__returnable.set,
None,
'Indicates if the field values may be returned in search results.',
)
# Element sortable uses Python identifier sortable
__sortable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'sortable'),
'sortable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_sortable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
sortable = property(
__sortable.value,
__sortable.set,
None,
'Indicates if the field can be used for sorting results.',
)
# Element multivalued uses Python identifier multivalued
__multivalued = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'multivalued'),
'multivalued',
'__httpns_dataone_orgservicetypesv1_1_QueryField_multivalued',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
multivalued = property(
__multivalued.value,
__multivalued.set,
None,
'Indicates if the field may contain multiple values. Some query engines\n such as SOLR support this capability.',
)
_ElementMap.update(
{
__name.name(): __name,
__description.name(): __description,
__type.name(): __type,
__searchable.name(): __searchable,
__returnable.name(): __returnable,
__sortable.name(): __sortable,
__multivalued.name(): __multivalued,
}
)
_AttributeMap.update({})
_module_typeBindings.QueryField = QueryField
Namespace.addCategoryObject('typeBinding', 'QueryField', QueryField)
queryEngineList = pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(Namespace, 'queryEngineList'),
QueryEngineList,
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
194,
2,
),
)
Namespace.addCategoryObject(
'elementBinding', queryEngineList.name().localName(), queryEngineList
)
queryEngineDescription = pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(Namespace, 'queryEngineDescription'),
QueryEngineDescription,
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
195,
2,
),
)
Namespace.addCategoryObject(
'elementBinding', queryEngineDescription.name().localName(), queryEngineDescription
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='The version of the underlying query engine. Used by clients to determine possible\n compatibility concerns or features available.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='Version of the schema in use by the query engine, e.g. "1.0.1"',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'name'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='The full, human readable name of the query engine. For example: \n "Apache SOLR"',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'additionalInfo'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryEngineDescription,
        documentation='An optional human readable description of the query engine. This can be \n used to describe any special capabilities or intended uses for the query engine. For example, \n a query engine may be tuned to suit a particular audience or domain as opposed to providing \n a general purpose discovery mechanism. This field may also contain links to additional information about the query engine, \n such as documentation for the search syntax provided by the query engine implementors.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryField'),
QueryField,
scope=QueryEngineDescription,
documentation='A list of query fields supported by the query engine.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
)
def _BuildAutomaton():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=1,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
counters.add(cc_0)
cc_1 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
counters.add(cc_1)
cc_2 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
counters.add(cc_2)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
st_1 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_1)
final_update = set()
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(pyxb.namespace.ExpandedName(None, 'name')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
st_2 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_2)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'additionalInfo')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
st_3 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_3)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'queryField')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
st_4 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_4)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_1, []))
transitions.append(pyxb.utils.fac.Transition(st_2, []))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_1, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_2, [pyxb.utils.fac.UpdateInstruction(cc_0, False)])
)
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_3, []))
transitions.append(pyxb.utils.fac.Transition(st_4, []))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_3, [pyxb.utils.fac.UpdateInstruction(cc_1, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_4, [pyxb.utils.fac.UpdateInstruction(cc_1, False)])
)
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_4, [pyxb.utils.fac.UpdateInstruction(cc_2, True)])
)
st_4._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, False, containing_state=None)
QueryEngineDescription._Automaton = _BuildAutomaton()
QueryEngineList._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryEngine'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryEngineList,
documentation='The name of a queryEngine. This value will be used as a path element in \n REST API calls and so should not contain characters that will need to be escaped.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
)
def _BuildAutomaton_():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_
del _BuildAutomaton_
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
counters.add(cc_0)
states = []
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineList._UseForTag(pyxb.namespace.ExpandedName(None, 'queryEngine')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_0, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
st_0._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, True, containing_state=None)
QueryEngineList._Automaton = _BuildAutomaton_()
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'name'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryField,
        documentation='The name of the field as used programmatically when \n constructing queries or other references to the field.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'description'),
pyxb.binding.datatypes.string,
scope=QueryField,
        documentation='An optional, repeatable, brief description of the field that can be\n used to help guide developers or end users in appropriate use of the field. May, for \n example, contain links to additional documentation.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'type'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryField,
documentation='The type of the field, expressed in the language peculiar to the \n query engine being described.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'searchable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field may be used in constructing queries (as opposed \n to only appearing in results)',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'returnable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field values may be returned in search results.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'sortable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field can be used for sorting results.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'multivalued'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field may contain multiple values. Some query engines\n such as SOLR support this capability.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
)
def _BuildAutomaton_2():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_2
del _BuildAutomaton_2
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
counters.add(cc_0)
cc_1 = pyxb.utils.fac.CounterCondition(
min=0,
max=1,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'name')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'description')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
st_1 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'type')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
st_2 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'searchable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
st_3 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'returnable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
st_4 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_4)
final_update = set()
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'sortable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
st_5 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_5)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'multivalued')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
st_6 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_6)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_1, []))
transitions.append(pyxb.utils.fac.Transition(st_2, []))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_1, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_2, [pyxb.utils.fac.UpdateInstruction(cc_0, False)])
)
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_3, []))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_4, []))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_5, []))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_6, []))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_6, [pyxb.utils.fac.UpdateInstruction(cc_1, True)])
)
st_6._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, False, containing_state=None)
QueryField._Automaton = _BuildAutomaton_2()
|
lib_common/src/d1_common/types/generated/dataoneTypes_v1_1.py
|
import io
import sys
import pyxb
import pyxb.binding
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import pyxb.binding.saxer
import pyxb.utils.domutils
import pyxb.utils.six
import pyxb.utils.utility
from . import dataoneTypes_v1 as _ImportedBinding_dataoneTypes_v1
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier(
'urn:uuid:c90f2764-b359-11e7-b444-080027018ba0'
)
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.6'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI(
'http://ns.dataone.org/service/types/v1.1', create_if_missing=True
)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument(xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(
fallback_namespace=default_namespace, location_base=location_base
)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, pyxb.utils.six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM(node, default_namespace=None):
"""Create a Python instance from the given DOM node. The node tag must correspond to
an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}.
"""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryEngineDescription with content type ELEMENT_ONLY
class QueryEngineDescription(pyxb.binding.basis.complexTypeDefinition):
"""Describes a query engine that can be used to search content on the node.
Query engines may be general purpose or specialized for particular communities or
domains.
"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryEngineDescription')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
72,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element queryEngineVersion uses Python identifier queryEngineVersion
__queryEngineVersion = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion'),
'queryEngineVersion',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_queryEngineVersion',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
queryEngineVersion = property(
__queryEngineVersion.value,
__queryEngineVersion.set,
None,
'The version of the underlying query engine. Used by clients to determine possible\n compatibility concerns or features available.',
)
# Element querySchemaVersion uses Python identifier querySchemaVersion
__querySchemaVersion = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion'),
'querySchemaVersion',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_querySchemaVersion',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
querySchemaVersion = property(
__querySchemaVersion.value,
__querySchemaVersion.set,
None,
'Version of the schema in use by the query engine, e.g. "1.0.1"',
)
# Element name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'name'),
'name',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_name',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
name = property(
__name.value,
__name.set,
None,
'The full, human readable name of the query engine. For example: \n "Apache SOLR"',
)
# Element additionalInfo uses Python identifier additionalInfo
__additionalInfo = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'additionalInfo'),
'additionalInfo',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_additionalInfo',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
additionalInfo = property(
__additionalInfo.value,
__additionalInfo.set,
None,
        'An optional human readable description of the query engine. This can be \n used to describe any special capabilities or intended uses for the query engine. For example, \n a query engine may be tuned to suit a particular audience or domain as opposed to providing \n a general purpose discovery mechanism. This field may also contain links to additional information about the query engine, \n such as documentation for the search syntax provided by the query engine implementors.',
)
# Element queryField uses Python identifier queryField
__queryField = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryField'),
'queryField',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineDescription_queryField',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
queryField = property(
__queryField.value,
__queryField.set,
None,
'A list of query fields supported by the query engine.',
)
_ElementMap.update(
{
__queryEngineVersion.name(): __queryEngineVersion,
__querySchemaVersion.name(): __querySchemaVersion,
__name.name(): __name,
__additionalInfo.name(): __additionalInfo,
__queryField.name(): __queryField,
}
)
_AttributeMap.update({})
_module_typeBindings.QueryEngineDescription = QueryEngineDescription
Namespace.addCategoryObject(
'typeBinding', 'QueryEngineDescription', QueryEngineDescription
)
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryEngineList with content type ELEMENT_ONLY
class QueryEngineList(pyxb.binding.basis.complexTypeDefinition):
"""A list of query engine names that indicate the possible values for
CNRead.getQueryEngineDescription and CNRead.query REST API endpoints."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryEngineList')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
114,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element queryEngine uses Python identifier queryEngine
__queryEngine = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'queryEngine'),
'queryEngine',
'__httpns_dataone_orgservicetypesv1_1_QueryEngineList_queryEngine',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
queryEngine = property(
__queryEngine.value,
__queryEngine.set,
None,
'The name of a queryEngine. This value will be used as a path element in \n REST API calls and so should not contain characters that will need to be escaped.',
)
_ElementMap.update({__queryEngine.name(): __queryEngine})
_AttributeMap.update({})
_module_typeBindings.QueryEngineList = QueryEngineList
Namespace.addCategoryObject('typeBinding', 'QueryEngineList', QueryEngineList)
# Complex type {http://ns.dataone.org/service/types/v1.1}QueryField with content type ELEMENT_ONLY
class QueryField(pyxb.binding.basis.complexTypeDefinition):
""""""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'QueryField')
_XSDLocation = pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
131,
2,
)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'name'),
'name',
'__httpns_dataone_orgservicetypesv1_1_QueryField_name',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
name = property(
__name.value,
__name.set,
None,
        'The name of the field as used programmatically when \n constructing queries or other references to the field.',
)
# Element description uses Python identifier description
__description = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'description'),
'description',
'__httpns_dataone_orgservicetypesv1_1_QueryField_description',
True,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
description = property(
__description.value,
__description.set,
None,
        'An optional, repeatable, brief description of the field that can be\n used to help guide developers or end users in appropriate use of the field. May, for \n example, contain links to additional documentation.',
)
# Element type uses Python identifier type
__type = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'type'),
'type',
'__httpns_dataone_orgservicetypesv1_1_QueryField_type',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
type = property(
__type.value,
__type.set,
None,
'The type of the field, expressed in the language peculiar to the \n query engine being described.',
)
# Element searchable uses Python identifier searchable
__searchable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'searchable'),
'searchable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_searchable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
searchable = property(
__searchable.value,
__searchable.set,
None,
'Indicates if the field may be used in constructing queries (as opposed \n to only appearing in results)',
)
# Element returnable uses Python identifier returnable
__returnable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'returnable'),
'returnable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_returnable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
returnable = property(
__returnable.value,
__returnable.set,
None,
'Indicates if the field values may be returned in search results.',
)
# Element sortable uses Python identifier sortable
__sortable = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'sortable'),
'sortable',
'__httpns_dataone_orgservicetypesv1_1_QueryField_sortable',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
sortable = property(
__sortable.value,
__sortable.set,
None,
'Indicates if the field can be used for sorting results.',
)
# Element multivalued uses Python identifier multivalued
__multivalued = pyxb.binding.content.ElementDeclaration(
pyxb.namespace.ExpandedName(None, 'multivalued'),
'multivalued',
'__httpns_dataone_orgservicetypesv1_1_QueryField_multivalued',
False,
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
multivalued = property(
__multivalued.value,
__multivalued.set,
None,
'Indicates if the field may contain multiple values. Some query engines\n such as SOLR support this capability.',
)
_ElementMap.update(
{
__name.name(): __name,
__description.name(): __description,
__type.name(): __type,
__searchable.name(): __searchable,
__returnable.name(): __returnable,
__sortable.name(): __sortable,
__multivalued.name(): __multivalued,
}
)
_AttributeMap.update({})
_module_typeBindings.QueryField = QueryField
Namespace.addCategoryObject('typeBinding', 'QueryField', QueryField)
queryEngineList = pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(Namespace, 'queryEngineList'),
QueryEngineList,
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
194,
2,
),
)
Namespace.addCategoryObject(
'elementBinding', queryEngineList.name().localName(), queryEngineList
)
queryEngineDescription = pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(Namespace, 'queryEngineDescription'),
QueryEngineDescription,
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
195,
2,
),
)
Namespace.addCategoryObject(
'elementBinding', queryEngineDescription.name().localName(), queryEngineDescription
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='The version of the underlying query engine. Used by clients to determine possible\n compatibility concerns or features available.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='Version of the schema in use by the query engine, e.g. "1.0.1"',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'name'),
pyxb.binding.datatypes.string,
scope=QueryEngineDescription,
documentation='The full, human readable name of the query engine. For example: \n "Apache SOLR"',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'additionalInfo'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryEngineDescription,
        documentation='An optional human readable description of the query engine. This can be \n used to describe any special capabilities or intended uses for the query engine. For example, \n a query engine may be tuned to suit a particular audience or domain as opposed to providing \n a general purpose discovery mechanism. This field may also contain links to additional information about the query engine, \n such as documentation for the search syntax provided by the query engine implementors.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
)
QueryEngineDescription._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryField'),
QueryField,
scope=QueryEngineDescription,
documentation='A list of query fields supported by the query engine.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
)
def _BuildAutomaton():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=1,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
counters.add(cc_0)
cc_1 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
counters.add(cc_1)
cc_2 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
counters.add(cc_2)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'queryEngineVersion')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
78,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'querySchemaVersion')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
84,
6,
),
)
st_1 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_1)
final_update = set()
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(pyxb.namespace.ExpandedName(None, 'name')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
89,
6,
),
)
st_2 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_2)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'additionalInfo')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
95,
6,
),
)
st_3 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_3)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineDescription._UseForTag(
pyxb.namespace.ExpandedName(None, 'queryField')
),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
105,
6,
),
)
st_4 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_4)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_1, []))
transitions.append(pyxb.utils.fac.Transition(st_2, []))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_1, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_2, [pyxb.utils.fac.UpdateInstruction(cc_0, False)])
)
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_3, []))
transitions.append(pyxb.utils.fac.Transition(st_4, []))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_3, [pyxb.utils.fac.UpdateInstruction(cc_1, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_4, [pyxb.utils.fac.UpdateInstruction(cc_1, False)])
)
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_4, [pyxb.utils.fac.UpdateInstruction(cc_2, True)])
)
st_4._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, False, containing_state=None)
QueryEngineDescription._Automaton = _BuildAutomaton()
QueryEngineList._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'queryEngine'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryEngineList,
documentation='The name of a queryEngine. This value will be used as a path element in \n REST API calls and so should not contain characters that will need to be escaped.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
)
def _BuildAutomaton_():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_
del _BuildAutomaton_
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
counters.add(cc_0)
states = []
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(
QueryEngineList._UseForTag(pyxb.namespace.ExpandedName(None, 'queryEngine')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
120,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_0, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
st_0._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, True, containing_state=None)
QueryEngineList._Automaton = _BuildAutomaton_()
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'name'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryField,
        documentation='The name of the field as used programmatically when \n constructing queries or other references to the field.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'description'),
pyxb.binding.datatypes.string,
scope=QueryField,
        documentation='An optional, repeatable, brief description of the field that can be\n used to help guide developers or end users in appropriate use of the field. May, for \n example, contain links to additional documentation.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'type'),
_ImportedBinding_dataoneTypes_v1.NonEmptyString,
scope=QueryField,
documentation='The type of the field, expressed in the language peculiar to the \n query engine being described.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'searchable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field may be used in constructing queries (as opposed \n to only appearing in results)',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'returnable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field values may be returned in search results.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'sortable'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field can be used for sorting results.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
)
QueryField._AddElement(
pyxb.binding.basis.element(
pyxb.namespace.ExpandedName(None, 'multivalued'),
pyxb.binding.datatypes.boolean,
scope=QueryField,
documentation='Indicates if the field may contain multiple values. Some query engines\n such as SOLR support this capability.',
location=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
)
def _BuildAutomaton_2():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_2
del _BuildAutomaton_2
import pyxb.utils.fac
counters = set()
cc_0 = pyxb.utils.fac.CounterCondition(
min=0,
max=None,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
counters.add(cc_0)
cc_1 = pyxb.utils.fac.CounterCondition(
min=0,
max=1,
metadata=pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'name')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
136,
6,
),
)
st_0 = pyxb.utils.fac.State(
symbol,
is_initial=True,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'description')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
142,
6,
),
)
st_1 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'type')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
149,
6,
),
)
st_2 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'searchable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
155,
6,
),
)
st_3 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'returnable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
161,
6,
),
)
st_4 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_4)
final_update = set()
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'sortable')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
166,
6,
),
)
st_5 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_5)
final_update = set()
final_update.add(pyxb.utils.fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(
QueryField._UseForTag(pyxb.namespace.ExpandedName(None, 'multivalued')),
pyxb.utils.utility.Location(
'/home/dahl/dev/d1_python/lib_common/src/d1_common/types/schemas/dataoneTypes_v1.1.xsd',
171,
6,
),
)
st_6 = pyxb.utils.fac.State(
symbol,
is_initial=False,
final_update=final_update,
is_unordered_catenation=False,
)
states.append(st_6)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_1, []))
transitions.append(pyxb.utils.fac.Transition(st_2, []))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_1, [pyxb.utils.fac.UpdateInstruction(cc_0, True)])
)
transitions.append(
pyxb.utils.fac.Transition(st_2, [pyxb.utils.fac.UpdateInstruction(cc_0, False)])
)
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_3, []))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_4, []))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_5, []))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(pyxb.utils.fac.Transition(st_6, []))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(
pyxb.utils.fac.Transition(st_6, [pyxb.utils.fac.UpdateInstruction(cc_1, True)])
)
st_6._set_transitionSet(transitions)
return pyxb.utils.fac.Automaton(states, counters, False, containing_state=None)
QueryField._Automaton = _BuildAutomaton_2()
| 0.551332 | 0.231593 |
import sys
import time
import curses
import argparse
import httplib2
import _thread
import colorama
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
from threading import Thread
from colorama import Fore, Back, Style
class HttpRequest(Thread):
stop = False
request_depth = 0
lock = _thread.allocate_lock()
requested_count = 0
error_count = 0
def __init__(self, url, max_request, do_like_a_spider, stop_on_error, delay_between_each_call, username, password):
self.url = url
self.max_request = max_request
self.do_like_a_spider = do_like_a_spider
self.stop_on_error = stop_on_error
self.delay_between_each_call = delay_between_each_call
self.username = username
self.password = password
# Create root_url
parse_result = urlparse(url)
self.root_url = parse_result.scheme + '://' + parse_result.netloc
# Call super method
super(HttpRequest, self).__init__()
def run(self):
for i in range(self.max_request):
if (not self.do_request(self.url) and self.stop_on_error) or HttpRequest.stop:
break
def do_request(self, url, depth=0):
time.sleep(self.delay_between_each_call)
if not HttpRequest.stop:
self.print_status(url)
http = httplib2.Http()
if len(self.username) > 0 and len(self.password) > 0:
http.add_credentials(self.username, self.password)
try:
header, content = http.request(url)
if self.do_like_a_spider and depth <= self.request_depth:
links = self.get_links(content.decode(), url, self.root_url)
for link in links:
self.do_request(link, depth + 1)
            except Exception:
self.inc_error()
if self.stop_on_error:
HttpRequest.stop = True
return False
return True
else:
return False
@staticmethod
def print_status(url):
HttpRequest.lock.acquire()
HttpRequest.requested_count = HttpRequest.requested_count + 1
print_with_color(1, 0, Fore.GREEN, 'Requested: ' + str(HttpRequest.requested_count).rjust(10) + ' - ' + Fore.RED + 'Error: ' + str(HttpRequest.error_count).rjust(10))
print_with_color((HttpRequest.requested_count - 1) % 10 + 2, 0, Fore.WHITE, 'Requesting..' + url, end='\n\r')
HttpRequest.lock.release()
@staticmethod
def inc_error():
HttpRequest.lock.acquire()
HttpRequest.error_count = HttpRequest.error_count + 1
HttpRequest.lock.release()
@staticmethod
def get_links(html, base_url, root_url):
result = []
html_parser = BeautifulSoup(html, 'lxml')
anchor_tags = html_parser.find_all('a')
for tag in anchor_tags:
url = urljoin(base_url, tag.get('href')).split('#')[0]
            url = url.rstrip('/')  # normalize the trailing slash
try:
result.index(url)
except ValueError:
try:
if url.index(root_url) == 0:
result.append(url)
                except ValueError:
pass
result.sort()
return result
def pos_escape(y, x):
return '\x1b[%d;%dH' % (y, x)
def clear_screen():
print('\033[2J')
def print_with_color(row, col, color, text, end=''):
print(pos_escape(row, col) + color + text, Style.RESET_ALL, end=end)
sys.stdout.flush()
# Init screen handler
stdscr = curses.initscr()
stdscr.refresh()
colorama.init()
# Parse the arguments
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--num_of_thread', help='Num of thread', type=int)
arg_parser.add_argument('--max_request_per_thread', help='Max request per thread', type=int)
arg_parser.add_argument('--do_like_a_spider', help='Do like a spider', type=str)
arg_parser.add_argument('--stop_on_error', help='Stop on error', type=str)
arg_parser.add_argument('--delay_between_each_call', help='Delay between each call', type=int)
arg_parser.add_argument('--username', help='Username for Basic-Authentication', type=str)
arg_parser.add_argument('--password', help='Password for Basic-Authentication', type=str)
arg_parser.add_argument('--url', help='Url', type=str, required=True)
args = arg_parser.parse_args()
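# A hypothetical invocation, shown only to illustrate the flags defined above
# (the host and numbers below are assumptions, not part of this script):
#
#   python stress_http_server.py --url http://localhost:8000 \
#       --num_of_thread 5 --max_request_per_thread 50 \
#       --do_like_a_spider true --stop_on_error false --delay_between_each_call 1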
# Prepare params
num_of_thread = args.num_of_thread if args.num_of_thread else 10
max_request_per_thread = args.max_request_per_thread if args.max_request_per_thread else 100
do_like_a_spider = args.do_like_a_spider == 'true' if args.do_like_a_spider else True
stop_on_error = args.stop_on_error == 'true' if args.stop_on_error else True
delay_between_each_call = args.delay_between_each_call if args.delay_between_each_call else 0
username = args.username if args.username else ''
password = args.password if args.password else ''
url = args.url
# Run..
requests = []
for i in range(num_of_thread):
request = HttpRequest(url, max_request_per_thread, do_like_a_spider, stop_on_error, delay_between_each_call, username, password)
requests.append(request)
request.start()
try:
# Wait for all requests finished
for request in requests:
request.join()
except KeyboardInterrupt:
HttpRequest.stop = True
if HttpRequest.requested_count >= 9:
print_with_color(12, 0, Fore.YELLOW, 'Done..Press ENTER to exit...')
else:
print_with_color((HttpRequest.requested_count % 10) + 3, 0, Fore.YELLOW, 'Done..Press ENTER to exit...')
stdscr.getkey()
curses.endwin()
|
stress_http_server.py
|
| 0.179638 | 0.044974 |
from datetime import datetime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property
from sqlalchemy import create_engine, UniqueConstraint, desc, Index
from sqlalchemy import Column, Integer, String, Date, Time, DateTime, SmallInteger, CHAR, func, text
from args import args
time_now = func.datetime("now", "localtime") if args.localtime else func.now()
# check_same_thread=False: skip the check that the connection is used only by the
# thread that created it, so the engine can be shared across threads.
# echo: log the actual SQL statements being executed.
engine = create_engine('sqlite:///myData.db?check_same_thread=False',
                       echo=bool(args.debug))
# Create/read/update/delete (CRUD) operations go through a session.
Session = sessionmaker(bind=engine)
# Declarative base class; every mapped class must inherit from it.
Base = declarative_base()
"""
# 查看映射对应的表
KeyMouse.__table__
# 创建数据表。一方面通过engine来连接数据库,另一方面根据哪些类继承了Base来决定创建哪些表
# checkfirst=True,表示创建表前先检查该表是否存在,如同名表已存在则不再创建。其实默认就是True
Base.metadata.create_all(engine, checkfirst=True)
# 上边的写法会在engine对应的数据库中创建所有继承Base的类对应的表,但很多时候很多只是用来则试的或是其他库的
# 此时可以通过tables参数指定方式,指示仅创建哪些表
# Base.metadata.create_all(engine,tables=[Base.metadata.tables['keymouse']],checkfirst=True)
# 在项目中由于model经常在别的文件定义,没主动加载时上边的写法可能写导致报错,可使用下边这种更明确的写法
# KeyMouse.__table__.create(engine, checkfirst=True)
# 另外我们说这一步的作用是创建表,当我们已经确定表已经在数据库中存在时,我完可以跳过这一步
# 针对已存放有关键数据的表,或大家共用的表,直接不写这创建代码更让人心里踏实
所以我就不写了,结果不是默认执行的,所以再加上吧...
# 反向生成代码
# sqlacodegen mysql+pymysql://user:password@localhost/dbname [--tables table_name1,table_name2] [--outfile model.py]
"""
# 定义键盘鼠标事件类KeyMouse,其继承上一步创建的Base
class KeyMouse(Base):
"""
# 如果有多个类指向同一张表,那么在后边的类需要把extend_existing设为True,表示在已有列基础上进行扩展
# 或者换句话说,sqlalchemy允许类是表的子集
# __table_args__ = {'extend_existing': True}
# 如果表在同一个数据库服务(datebase)的不同数据库中(schema),可使用schema参数进一步指定数据库
# __table_args__ = {'schema': 'test_database'}
# 各变量名一定要与表的各字段名一样,因为相同的名字是他们之间的唯一关联关系
# 从语法上说,各变量类型和表的类型可以不完全一致,如表字段是String(64),但我就定义成String(32)
# 但为了避免造成不必要的错误,变量的类型和其对应的表的字段的类型还是要相一致
# sqlalchemy强制要求必须要有主键字段不然会报错,如果要映射一张已存在且没有主键的表,那么可行的做法是将所有字段都设为primary_key=True
# 不要看随便将一个非主键字段设为primary_key,然后似乎就没报错就能使用了,sqlalchemy在接收到查询结果后还会自己根据主键进行一次去重
"""
# 指定本类映射到`keymouse`表
__tablename__ = 'keymouse'
# 指定id映射到id字段; id字段为整型,为主键,自动增长(其实整型主键默认就自动增长)
id = Column(Integer, primary_key=True, autoincrement=True)
# 指定name映射到name字段; name字段为字符串类形
name = Column(CHAR(1), nullable=False)
    create_time = Column(DateTime(timezone=8),
                         server_default=time_now,
                         comment='creation time (datetime)')
    update_time = Column(DateTime,
                         server_default=time_now,
                         onupdate=time_now,
                         comment='last-modified time')
    count = Column(Integer, server_default=text('1'), comment='occurrence count')
    device = Column(SmallInteger,
                    nullable=False,
                    server_default=text('0'),
                    comment='device; 1: keyboard, 0: mouse')
    UniqueConstraint('name', 'create_time', name='fcx_name_date')
    # __repr__ controls the string printed for instances of this class; it is optional.
    def __repr__(self):
        return "<KeyMouse(name='%s', create_time='%s', count='%d')>" % (
            self.name, self.create_time, self.count)
# Work-time state model WorkInfo
class WorkInfo(Base):
    """
    Records work-time states (see type_map for the supported state types).
    """
__tablename__ = 'workinfo'
id = Column(Integer, primary_key=True, autoincrement=True)
    type = Column(SmallInteger,
                  server_default=text('1'),
                  comment='state type, see type_map')
    continued = Column(Integer,
                       nullable=False,
                       comment="duration of this state; create_time - continued is when the state actually started")
    star = Column(SmallInteger,
                  server_default=text('0'),
                  comment='star rating, for bookmarking entries')
    create_time = Column(DateTime(timezone=8),
                         server_default=time_now,
                         comment='creation time (datetime); a row is only inserted when a state ends or switches')
    update_time = Column(DateTime,
                         server_default=time_now,
                         onupdate=time_now,
                         comment='last-modified time')
    note = Column(String, comment='note, e.g. jot down the current work progress as a reminder before taking a short break')
    UniqueConstraint('type', 'create_time', name='notefx_type_crtime')
    Index("date_max", "create_time", "continued")
    # Values: work, meeting, short break, lunch break
    type_map = {1: "工作", 2: "开会", -1: "小憩", -2: "午休"}
type_map_reverse = dict(zip(type_map.values(), type_map.keys()))
    @hybrid_property
    def name(self):
        # Human-readable name for this row's type
        return self.type_map[self.type]
    @hybrid_method
    def point_type(self, _type):
        # Positive: match types greater than _type; negative: less than; zero: match all
        if _type > 0:
            return self.type > _type
        elif _type < 0:
            return self.type < _type
        else:
            return True
    def __repr__(self):
        return "<WorkInfo(name='%s', create_time='%s', type='%d')>" % (
            self.name, self.create_time, self.type)
WorkInfo.__table__.create(engine, checkfirst=True)
KeyMouse.__table__.create(engine, checkfirst=True)
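# A minimal usage sketch, kept as a comment so importing this module has no side
# effects. The key name 'a' and device value are illustrative assumptions only:
#
#   session = Session()
#   session.add(KeyMouse(name='a', device=1))
#   session.commit()
#   print(session.query(KeyMouse).order_by(desc(KeyMouse.create_time)).first())
#   session.close()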
|
data_alchemy/models.py
|
| 0.306423 | 0.290292 |
import asyncio
import logging
from abc import ABC, abstractmethod
from typing import Optional, Awaitable, Tuple, Union, Any, TYPE_CHECKING
import grpc
from google.protobuf import empty_pb2
from . import rpc
from .settings import Settings, configure, get_stack, get_project, get_root_resource
from .sync_await import _sync_await
from ..runtime.proto import engine_pb2, engine_pb2_grpc, provider_pb2, resource_pb2, resource_pb2_grpc
from ..runtime.stack import Stack, run_pulumi_func
from ..output import Output
if TYPE_CHECKING:
from ..resource import Resource
def test(fn):
def wrapper(*args, **kwargs):
_sync_await(run_pulumi_func(lambda: _sync_await(Output.from_input(fn(*args, **kwargs)).future())))
return wrapper
class Mocks(ABC):
"""
Mocks is an abstract class that allows subclasses to replace operations normally implemented by the Pulumi engine with
their own implementations. This can be used during testing to ensure that calls to provider functions and resource constructors
return predictable values.
"""
@abstractmethod
def call(self, token: str, args: dict, provider: Optional[str]) -> dict:
"""
call mocks provider-implemented function calls (e.g. aws.get_availability_zones).
:param str token: The token that indicates which function is being called. This token is of the form "package:module:function".
:param dict args: The arguments provided to the function call.
:param Optional[str] provider: If provided, the identifier of the provider instance being used to make the call.
"""
return {}
@abstractmethod
def new_resource(self, type_: str, name: str, inputs: dict, provider: Optional[str], id_: Optional[str]) -> Tuple[str, dict]:
"""
new_resource mocks resource construction calls. This function should return the physical identifier and the output properties
for the resource being constructed.
:param str type_: The token that indicates which resource type is being constructed. This token is of the form "package:module:type".
:param str name: The logical name of the resource instance.
:param dict inputs: The inputs for the resource.
        :param Optional[str] provider: If provided, the identifier of the provider instance being used to manage this resource.
:param Optional[str] id_: If provided, the physical identifier of an existing resource to read or import.
"""
return ("", {})
class MockMonitor:
mocks: Mocks
def __init__(self, mocks: Mocks):
self.mocks = mocks
def make_urn(self, parent: str, type_: str, name: str) -> str:
if parent != "":
qualifiedType = parent.split("::")[2]
parentType = qualifiedType.split("$").pop()
type_ = parentType + "$" + type_
return "urn:pulumi:" + "::".join([get_stack(), get_project(), type_, name])
def Invoke(self, request):
args = rpc.deserialize_properties(request.args)
ret = self.mocks.call(request.tok, args, request.provider)
ret_proto = _sync_await(rpc.serialize_properties(ret, {}))
fields = {"failures": None, "return": ret_proto}
return provider_pb2.InvokeResponse(**fields)
def ReadResource(self, request):
state = rpc.deserialize_properties(request.properties)
_, state = self.mocks.new_resource(request.type, request.name, state, request.provider, request.id)
props_proto = _sync_await(rpc.serialize_properties(state, {}))
urn = self.make_urn(request.parent, request.type, request.name)
return resource_pb2.ReadResourceResponse(urn=urn, properties=props_proto)
def RegisterResource(self, request):
urn = self.make_urn(request.parent, request.type, request.name)
if request.type == "pulumi:pulumi:Stack":
return resource_pb2.RegisterResourceResponse(urn=urn)
inputs = rpc.deserialize_properties(request.object)
id_, state = self.mocks.new_resource(request.type, request.name, inputs, request.provider, request.importId)
obj_proto = _sync_await(rpc.serialize_properties(state, {}))
return resource_pb2.RegisterResourceResponse(urn=urn, id=id_, object=obj_proto)
def RegisterResourceOutputs(self, request):
#pylint: disable=unused-argument
return empty_pb2.Empty()
def SupportsFeature(self, request):
#pylint: disable=unused-argument
return type('SupportsFeatureResponse', (object,), {'hasSupport' : True})
class MockEngine:
logger: logging.Logger
def __init__(self, logger: Optional[logging.Logger]):
self.logger = logger if logger is not None else logging.getLogger()
def Log(self, request):
if request.severity == engine_pb2.DEBUG:
self.logger.debug(request.message)
elif request.severity == engine_pb2.INFO:
self.logger.info(request.message)
elif request.severity == engine_pb2.WARNING:
self.logger.warning(request.message)
elif request.severity == engine_pb2.ERROR:
self.logger.error(request.message)
def set_mocks(mocks: Mocks,
project: Optional[str] = None,
stack: Optional[str] = None,
preview: Optional[bool] = None,
logger: Optional[logging.Logger] = None):
"""
set_mocks configures the Pulumi runtime to use the given mocks for testing.
"""
settings = Settings(monitor=MockMonitor(mocks),
engine=MockEngine(logger),
project=project if project is not None else 'project',
stack=stack if stack is not None else 'stack',
dry_run=preview,
test_mode_enabled=True)
configure(settings)
# Ensure a new root stack resource has been initialized.
if get_root_resource() is None:
Stack(lambda: None)
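# A minimal testing sketch, kept as a comment so importing this module stays
# side-effect free. MyMocks and the returned values are illustrative assumptions,
# not part of the SDK itself:
#
#   class MyMocks(Mocks):
#       def call(self, token, args, provider):
#           return {}
#       def new_resource(self, type_, name, inputs, provider, id_):
#           return (name + "_id", inputs)
#
#   set_mocks(MyMocks(), project="test-project", stack="test-stack")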
|
sdk/python/lib/pulumi/runtime/mocks.py
|
| 0.827375 | 0.213869 |
from collections import namedtuple
from dataclasses import dataclass
from utils import to_form_url
@dataclass
class EntryInfo:
required: bool
prompt: bool
type: str
key: str
title: str
value: str
# See README's Config section for more info
TYPES = {
"words": ["w", "word", "text"],
"choice": ["m", "mc", "multiple choice"],
"checkboxes": ["c", "checkbox"],
"date": ["d"],
"time": ["t"],
"extra": ["x", "xD", "extra data"],
}
@classmethod
def from_string(cls, string):
"""
Return info on a config file line.
Parse a string of the format `[*] [!] type - key ; title = value`.
Return a dataclass (simple object) with the config info.
A string "*!type-key;title=value" would give `EntryInfo(required=True,
prompt=True, type="type", key="key", title="title", value="value")`.
Examples of config lines:
w-1000;Question=Default
! time - 1001 ; Time = current
*multiple choice - 1001 ; Class =
checkbox-1002; Languages = Python, Java, C++
*! extra-emailAddress; Email Address =
"""
string = string.strip()
if not string:
raise ValueError("Empty entry")
required = (string[0] == "*")
string = string.removeprefix("*").strip()
if not string:
raise ValueError("Missing type")
prompt = (string[0] == "!")
string = string.removeprefix("!").strip()
type, split, string = map(str.strip, string.partition("-"))
for name, aliases in cls.TYPES.items():
if type == name:
break
elif type in aliases:
type = name
break
else:
raise ValueError(f"Type not valid: {type}")
if not split:
raise ValueError("Missing type-key split '-'")
key, split, string = map(str.strip, string.partition(";"))
if not key:
raise ValueError("Missing key")
if not split:
raise ValueError("Missing key-title split ';'")
title, split, value = map(str.strip, string.partition("="))
if not title:
title = key # Title defaults to the key if absent.
if not split:
raise ValueError("Missing title-value split '='")
return cls(required, prompt, type, key, title, value)
def __str__(self):
return (
f"{'*'*self.required}{'!'*self.prompt}{self.type}"
f"-{self.key};{self.title}={self.value}"
)
ConfigInfo = namedtuple("ConfigInfo", "url entries")
def open_config(file):
"""
Open config file and return the URL and entries.
"""
if isinstance(file, str):
file = open(file)
with file:
url = to_form_url(file.readline())
entries = []
for line in file:
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
entries.append(EntryInfo.from_string(line))
return ConfigInfo(url, entries)
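# A small illustrative sketch of the expected file layout (the form URL and the entry
# line are made-up examples, not real data); the first line is the form URL, every
# following non-empty, non-comment line is one entry:
#
#   with open("example.config", "w") as f:
#       f.write("https://docs.google.com/forms/d/e/FORM_ID/viewform\n")
#       f.write("w-1000;Question=Default\n")
#   url, entries = open_config("example.config")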
# - Tests
def test_entry_from_string():
# TODO: Add tests for ValueError (maybe use pytest)
a = EntryInfo(True, True, "words", "key", "title", "value")
assert EntryInfo.from_string(" *!words-key;title=value ") == a
assert EntryInfo.from_string(" * ! words - key ; title = value ") == a
b = EntryInfo(False, False, "words", "key", "key", "")
assert EntryInfo.from_string("words-key;=") == b
assert EntryInfo.from_string("w-key;=") == b
assert EntryInfo.from_string("word-key;=") == b
assert EntryInfo.from_string("text-key;=") == b
def test_entry_str():
entry = EntryInfo(True, True, "words", "key", "title", "value")
assert EntryInfo.from_string(str(entry)) == entry
line = "*!words-key;title=value"
assert str(entry) == line
assert str(EntryInfo.from_string(line)) == line
|
config.py
|
| 0.751101 | 0.316079 |
import argparse
import asyncio
import fcntl
import json
import logging
import os
import pty
import shlex
import signal
import struct
import sys
import termios
import traceback
import zmq, zmq.asyncio
from .compat import current_loop
from .logging import BraceStyleAdapter
from .utils import safe_close_task
log = BraceStyleAdapter(logging.getLogger())
class Terminal:
'''
A wrapper for a terminal-based app.
'''
def __init__(self, shell_cmd, ev_term, sock_out, *,
auto_restart=True, loop=None):
self._sorna_media = []
self.loop = loop if loop else current_loop()
self.zctx = sock_out.context
self.ev_term = ev_term
self.pid = None
self.fd = None
self.shell_cmd = shell_cmd
self.auto_restart = auto_restart
# For command output
self.sock_out = sock_out
# For terminal I/O
self.sock_term_in = None
self.sock_term_out = None
self.term_in_task = None
self.term_out_task = None
self.start_lock = asyncio.Lock(loop=self.loop)
self.accept_term_input = False
self.cmdparser = argparse.ArgumentParser()
self.subparsers = self.cmdparser.add_subparsers()
# Base commands for generic terminal-based app
parser_ping = self.subparsers.add_parser('ping')
parser_ping.set_defaults(func=self.do_ping)
parser_resize = self.subparsers.add_parser('resize')
parser_resize.add_argument('rows', type=int)
parser_resize.add_argument('cols', type=int)
parser_resize.set_defaults(func=self.do_resize_term)
async def do_ping(self, args) -> int:
await self.sock_out.send_multipart([b'stdout', b'pong!'])
return 0
async def do_resize_term(self, args) -> int:
if self.fd is None:
return
origsz = struct.pack('HHHH', 0, 0, 0, 0)
origsz = fcntl.ioctl(self.fd, termios.TIOCGWINSZ, origsz)
_, _, origx, origy = struct.unpack('HHHH', origsz)
newsz = struct.pack('HHHH', args.rows, args.cols, origx, origy)
newsz = fcntl.ioctl(self.fd, termios.TIOCSWINSZ, newsz)
newr, newc, _, _ = struct.unpack('HHHH', newsz)
await self.sock_out.send_multipart([
b'stdout',
f'OK; terminal resized to {newr} rows and {newc} cols'.encode(),
])
return 0
async def handle_command(self, code_txt) -> int:
try:
if code_txt.startswith('%'):
args = self.cmdparser.parse_args(
shlex.split(code_txt[1:], comments=True))
if asyncio.iscoroutine(args.func) or \
asyncio.iscoroutinefunction(args.func):
return await args.func(args)
else:
return args.func(args)
else:
await self.sock_out.send_multipart([b'stderr', b'Invalid command.'])
return 127
except:
exc_type, exc_val, tb = sys.exc_info()
            trace = ''.join(traceback.format_exception(exc_type, exc_val, tb))
            await self.sock_out.send_multipart([b'stderr', trace.encode()])
return 1
finally:
opts = {
'upload_output_files': False,
}
body = json.dumps(opts).encode()
await self.sock_out.send_multipart([b'finished', body])
async def start(self):
assert not self.accept_term_input
await safe_close_task(self.term_in_task)
await safe_close_task(self.term_out_task)
pid, fd = pty.fork()
if pid == 0:
args = shlex.split(self.shell_cmd)
os.execv(args[0], args)
else:
self.pid = pid
self.fd = fd
if self.sock_term_in is None:
self.sock_term_in = self.zctx.socket(zmq.SUB)
self.sock_term_in.bind('tcp://*:2002')
self.sock_term_in.subscribe(b'')
if self.sock_term_out is None:
self.sock_term_out = self.zctx.socket(zmq.PUB)
self.sock_term_out.bind('tcp://*:2003')
term_reader = asyncio.StreamReader()
term_read_protocol = asyncio.StreamReaderProtocol(term_reader)
await self.loop.connect_read_pipe(
lambda: term_read_protocol, os.fdopen(self.fd, 'rb'))
_reader_factory = lambda: asyncio.StreamReaderProtocol(
asyncio.StreamReader())
term_writer_transport, term_writer_protocol = \
await self.loop.connect_write_pipe(_reader_factory,
os.fdopen(self.fd, 'wb'))
term_writer = asyncio.StreamWriter(term_writer_transport,
term_writer_protocol,
None, self.loop)
self.term_in_task = self.loop.create_task(self.term_in(term_writer))
self.term_out_task = self.loop.create_task(self.term_out(term_reader)) # noqa
self.accept_term_input = True
await asyncio.sleep(0)
async def restart(self):
try:
async with self.start_lock:
if not self.accept_term_input:
return
self.accept_term_input = False
await self.sock_term_out.send_multipart([b'Restarting...\r\n'])
os.waitpid(self.pid, 0)
await self.start()
except Exception:
log.exception('Unexpected error during restart of terminal')
async def term_in(self, term_writer):
try:
while True:
data = await self.sock_term_in.recv_multipart()
if not data:
break
if self.accept_term_input:
try:
term_writer.write(data[0])
await term_writer.drain()
except IOError:
break
except asyncio.CancelledError:
pass
except Exception:
log.exception('Unexpected error at term_in()')
async def term_out(self, term_reader):
try:
while not term_reader.at_eof():
try:
data = await term_reader.read(4096)
except IOError:
# In docker containers, this path is taken.
break
if not data:
# In macOS, this path is taken.
break
await self.sock_term_out.send_multipart([data])
self.fd = None
if not self.auto_restart:
await self.sock_term_out.send_multipart([b'Terminated.\r\n'])
return
if not self.ev_term.is_set() and self.accept_term_input:
self.loop.create_task(self.restart())
except asyncio.CancelledError:
pass
except Exception:
log.exception('Unexpected error at term_out()')
async def shutdown(self):
self.term_in_task.cancel()
self.term_out_task.cancel()
await self.term_in_task
await self.term_out_task
self.sock_term_in.close()
self.sock_term_out.close()
os.kill(self.pid, signal.SIGHUP)
os.kill(self.pid, signal.SIGCONT)
await asyncio.sleep(0)
os.waitpid(self.pid, 0)
self.pid = None
self.fd = None
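# A rough usage sketch, comment only; the surrounding kernel runner normally wires
# this up. The shell command, socket type, and address below are assumptions added
# purely for illustration:
#
#   zctx = zmq.asyncio.Context()
#   sock_out = zctx.socket(zmq.PUSH)
#   sock_out.connect('tcp://127.0.0.1:2001')
#   ev_term = asyncio.Event()
#   term = Terminal('/bin/bash', ev_term, sock_out)
#   # inside a running event loop:  await term.start()  ...  await term.shutdown()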
|
src/ai/backend/kernel/terminal.py
|
| 0.218669 | 0.072276 |
from consolemenu import ConsoleMenu
from consolemenu.items import SelectionItem
class SelectionMenu(ConsoleMenu):
"""
    A menu that simplifies item creation: just give it a list of strings and it builds the menu for you.
Args:
strings (:obj:`list` of :obj:`str`): The list of strings this menu should be built from.
title (str): The title of the menu.
subtitle (str): The subtitle of the menu.
screen (:obj:`consolemenu.screen.Screen`): The screen object associated with this menu.
formatter (:obj:`MenuFormatBuilder`): The MenuFormatBuilder instance used to format this menu.
prologue_text (str): Text to include in the "prologue" section of the menu.
epilogue_text (str): Text to include in the "epilogue" section of the menu.
show_exit_option (bool): Specifies whether this menu should show an exit item by default. Defaults to True.
Can be overridden when the menu is started.
exit_option_text (str): Text for the Exit menu item. Defaults to 'Exit'.
clear_screen (bool): Set to False to disable clearing of screen between menus
"""
def __init__(self, strings, title=None, subtitle=None, screen=None, formatter=None,
prologue_text=None, epilogue_text=None, show_exit_option=True, exit_option_text='Exit',
clear_screen=True):
super(SelectionMenu, self).__init__(title, subtitle, screen=screen, formatter=formatter,
prologue_text=prologue_text, epilogue_text=epilogue_text,
show_exit_option=show_exit_option, exit_option_text=exit_option_text,
clear_screen=clear_screen)
for index, item in enumerate(strings):
self.append_item(SelectionItem(item, index, self))
@classmethod
def get_selection(cls, strings, title="Select an option", subtitle=None, show_exit_option=True, _menu=None):
"""
Single-method way of getting a selection out of a list of strings.
Args:
strings (:obj:`list` of :obj:`str`): The list of strings this menu should be built from.
title (str): The title of the menu.
subtitle (str): The subtitle of the menu.
show_exit_option (bool): Specifies whether this menu should show an exit item by default. Defaults to True.
_menu: Should probably only be used for testing, pass in a list and the created menu used internally by
the method will be appended to it
Returns:
int: The index of the selected option.
"""
menu = cls(strings, title, subtitle, show_exit_option=show_exit_option)
if _menu is not None:
_menu.append(menu)
menu.show()
menu.join()
return menu.selected_option
def append_string(self, string):
self.append_item(SelectionItem(string))
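# A short illustrative sketch, comment only; the option strings are arbitrary:
#
#   index = SelectionMenu.get_selection(["apple", "banana", "cherry"],
#                                       title="Pick a fruit")
#   # index is the position of the chosen string; choosing the appended Exit item
#   # yields the last index.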
|
consolemenu/selection_menu.py
|
| 0.549882 | 0.09899 |
import os
import sys
import re
import subprocess
import plistlib
import shutil
CLT_BINARY = os.path.dirname(os.path.realpath(__file__)) + '/platypus'
def profile_plist_for_args(args):
pnargs = [CLT_BINARY]
pnargs.extend(args)
pnargs.extend(['-O', '-'])
out = subprocess.check_output(pnargs)
return plistlib.readPlistFromString(out)
def create_app_with_args(args, name='MyApp'):
pnargs = [CLT_BINARY]
pnargs.extend(args)
pnargs.extend(['--overwrite', '--name', name, 'args.py', name + '.app'])
with open(os.devnull, 'w') as devnull:
out = subprocess.check_output(pnargs, stderr=devnull)
return 'MyApp.app'
def create_profile_with_args(args, name='dummy.profile'):
pass
def run_app(name='MyApp', args=[]):
with open(os.devnull, 'w') as devnull:
cmd = ['./' + name + '.app/Contents/MacOS/' + name]
cmd.extend(args)
out = subprocess.check_output(cmd, stderr=devnull)
with open('args.txt', 'r') as f:
arglist = [l.rstrip('\n') for l in f.readlines()]
return arglist
os.chdir(os.path.dirname(os.path.realpath(__file__)))
print("Checking basic sanity of default profile")
plist = profile_plist_for_args([])
assert(plist['Version'] == '1.0')
assert(plist['InterpreterPath'] == '/bin/sh')
assert(plist['InterfaceType'] == 'Text Window')
assert(len(plist['BundledFiles']) == 0)
assert(plist['Authentication'] == False)
assert(plist['Name'] != '')
assert(re.match(r'\w+\.\w+\.\w+', plist['Identifier']))
print("Profile generation: Testing boolean switches")
boolean_opts = {
'-A': 'Authentication',
'-D': ['Droppable', 'AcceptsFiles'],
'-F': 'AcceptsText',
'-N': 'DeclareService',
'-B': 'RunInBackground',
'-Z': 'PromptForFileOnLaunch',
'-c': 'StatusItemUseSystemFont',
'-d': 'DevelopmentVersion',
'-l': 'OptimizeApplication',
'-y': 'Overwrite'
}
for k,v in boolean_opts.iteritems():
plist = profile_plist_for_args([k])
l = v
if isinstance(v, basestring):
l = [v]
for m in l:
assert(plist[m] == True)
inv_boolean_opts = {
'-R': 'RemainRunning'
}
for k,v in inv_boolean_opts.iteritems():
plist = profile_plist_for_args([k])
assert(plist[v] == False)
print("Profile generation: Testing strings")
string_opts = {
'-a': ['Name', 'MyAppName'],
'-o': ['InterfaceType', 'Progress Bar'],
'-p': ['InterpreterPath', '/usr/bin/perl'],
'-V': ['Version', '3.2'],
'-u': ['Author', '<NAME>'],
'-I': ['Identifier', 'org.something.Blergh'],
'-b': ['TextBackground', '#000000'],
'-g': ['TextForeground', '#ffeeee'],
# '-n': ['TextFont', 'Comic Sans 13'],
'-K': ['StatusItemDisplayType', 'Icon'],
'-Y': ['StatusItemTitle', 'MySillyTitle'],
}
for k,v in string_opts.iteritems():
plist = profile_plist_for_args([k, v[1]])
assert(plist[v[0]] == v[1])
print("Profile generation: Testing data args")
dummy_icon_path = os.path.abspath('dummy.icns')
data_opts = {
'-i': ['IconPath', dummy_icon_path],
'-Q': ['DocIconPath', dummy_icon_path],
'-L': ['StatusItemIcon', dummy_icon_path]
}
for k,v in data_opts.iteritems():
plist = profile_plist_for_args([k, v[1]])
# print plist[v[0]]
assert(plist[v[0]] != None)
print("Profile generation: Testing flags w. multiple args")
# Create dummy bundled files
open('dummy1', 'w').close()
open('dummy2', 'w').close()
multiple_items_opts = {
'-G': ['InterpreterArgs', ['-a','-b','-c']],
'-C': ['ScriptArgs', ['-e','-f','-g']],
'-f': ['BundledFiles', [os.path.abspath('dummy1'),os.path.abspath('dummy2')]],
'-X': ['Suffixes', ['txt','png','pdf']],
'-T': ['UniformTypes', ['public.text', 'public.rtf']],
'-U': ['URISchemes', ['https', 'ssh']]
}
for k,v in multiple_items_opts.iteritems():
plist = profile_plist_for_args([k, '|'.join(v[1])])
items = plist[v[0]]
#print items
for i in items:
assert(i in v[1])
os.remove('dummy1')
os.remove('dummy2')
print("Verifying app directory structure and permissions")
app_path = create_app_with_args(['-R'])
files = [
app_path + '/',
app_path + '/Contents',
app_path + '/Contents/Info.plist',
app_path + '/Contents/MacOS',
app_path + '/Contents/MacOS/MyApp',
app_path + '/Contents/Resources',
app_path + '/Contents/Resources/AppIcon.icns',
app_path + '/Contents/Resources/AppSettings.plist',
app_path + '/Contents/Resources/MainMenu.nib',
app_path + '/Contents/Resources/script'
]
for p in files:
assert(os.path.exists(p))
assert(os.access(files[4], os.X_OK)) # app binary
assert(os.access(files[9], os.X_OK)) # script
# Verify keys in AppSettings.plist
# Create new app from python, perl scripts, verify
# that correct interpreter is automatically selected
# Run app
print("Verifying app argument handling")
assert(run_app(args=['a', 'b', 'c']) == ['a', 'b', 'c'])
# Create app with droppable settings, test opening file
#shutil.rmtree('MyApp.app')
|
Tests/clt_tests.py
|
import os
import sys
import re
import subprocess
import plistlib
import shutil
CLT_BINARY = os.path.dirname(os.path.realpath(__file__)) + '/platypus'
def profile_plist_for_args(args):
pnargs = [CLT_BINARY]
pnargs.extend(args)
pnargs.extend(['-O', '-'])
out = subprocess.check_output(pnargs)
return plistlib.readPlistFromString(out)
def create_app_with_args(args, name='MyApp'):
pnargs = [CLT_BINARY]
pnargs.extend(args)
pnargs.extend(['--overwrite', '--name', name, 'args.py', name + '.app'])
with open(os.devnull, 'w') as devnull:
out = subprocess.check_output(pnargs, stderr=devnull)
return 'MyApp.app'
def create_profile_with_args(args, name='dummy.profile'):
pass
def run_app(name='MyApp', args=[]):
with open(os.devnull, 'w') as devnull:
cmd = ['./' + name + '.app/Contents/MacOS/' + name]
cmd.extend(args)
out = subprocess.check_output(cmd, stderr=devnull)
with open('args.txt', 'r') as f:
arglist = [l.rstrip('\n') for l in f.readlines()]
return arglist
os.chdir(os.path.dirname(os.path.realpath(__file__)))
print("Checking basic sanity of default profile")
plist = profile_plist_for_args([])
assert(plist['Version'] == '1.0')
assert(plist['InterpreterPath'] == '/bin/sh')
assert(plist['InterfaceType'] == 'Text Window')
assert(len(plist['BundledFiles']) == 0)
assert(plist['Authentication'] == False)
assert(plist['Name'] != '')
assert(re.match(r'\w+\.\w+\.\w+', plist['Identifier']))
print("Profile generation: Testing boolean switches")
boolean_opts = {
'-A': 'Authentication',
'-D': ['Droppable', 'AcceptsFiles'],
'-F': 'AcceptsText',
'-N': 'DeclareService',
'-B': 'RunInBackground',
'-Z': 'PromptForFileOnLaunch',
'-c': 'StatusItemUseSystemFont',
'-d': 'DevelopmentVersion',
'-l': 'OptimizeApplication',
'-y': 'Overwrite'
}
for k,v in boolean_opts.iteritems():
plist = profile_plist_for_args([k])
l = v
if isinstance(v, basestring):
l = [v]
for m in l:
assert(plist[m] == True)
inv_boolean_opts = {
'-R': 'RemainRunning'
}
for k,v in inv_boolean_opts.iteritems():
plist = profile_plist_for_args([k])
assert(plist[v] == False)
print("Profile generation: Testing strings")
string_opts = {
'-a': ['Name', 'MyAppName'],
'-o': ['InterfaceType', 'Progress Bar'],
'-p': ['InterpreterPath', '/usr/bin/perl'],
'-V': ['Version', '3.2'],
'-u': ['Author', '<NAME>'],
'-I': ['Identifier', 'org.something.Blergh'],
'-b': ['TextBackground', '#000000'],
'-g': ['TextForeground', '#ffeeee'],
# '-n': ['TextFont', 'Comic Sans 13'],
'-K': ['StatusItemDisplayType', 'Icon'],
'-Y': ['StatusItemTitle', 'MySillyTitle'],
}
for k,v in string_opts.iteritems():
plist = profile_plist_for_args([k, v[1]])
assert(plist[v[0]] == v[1])
print("Profile generation: Testing data args")
dummy_icon_path = os.path.abspath('dummy.icns')
data_opts = {
'-i': ['IconPath', dummy_icon_path],
'-Q': ['DocIconPath', dummy_icon_path],
'-L': ['StatusItemIcon', dummy_icon_path]
}
for k,v in data_opts.iteritems():
plist = profile_plist_for_args([k, v[1]])
# print plist[v[0]]
assert(plist[v[0]] != None)
print("Profile generation: Testing flags w. multiple args")
# Create dummy bundled files
open('dummy1', 'w').close()
open('dummy2', 'w').close()
multiple_items_opts = {
'-G': ['InterpreterArgs', ['-a','-b','-c']],
'-C': ['ScriptArgs', ['-e','-f','-g']],
'-f': ['BundledFiles', [os.path.abspath('dummy1'),os.path.abspath('dummy2')]],
'-X': ['Suffixes', ['txt','png','pdf']],
'-T': ['UniformTypes', ['public.text', 'public.rtf']],
'-U': ['URISchemes', ['https', 'ssh']]
}
for k,v in multiple_items_opts.iteritems():
plist = profile_plist_for_args([k, '|'.join(v[1])])
items = plist[v[0]]
#print items
for i in items:
assert(i in v[1])
os.remove('dummy1')
os.remove('dummy2')
print("Verifying app directory structure and permissions")
app_path = create_app_with_args(['-R'])
files = [
app_path + '/',
app_path + '/Contents',
app_path + '/Contents/Info.plist',
app_path + '/Contents/MacOS',
app_path + '/Contents/MacOS/MyApp',
app_path + '/Contents/Resources',
app_path + '/Contents/Resources/AppIcon.icns',
app_path + '/Contents/Resources/AppSettings.plist',
app_path + '/Contents/Resources/MainMenu.nib',
app_path + '/Contents/Resources/script'
]
for p in files:
assert(os.path.exists(p))
assert(os.access(files[4], os.X_OK)) # app binary
assert(os.access(files[9], os.X_OK)) # script
# Verify keys in AppSettings.plist
# Create new app from python, perl scripts, verify
# that correct interpreter is automatically selected
# Run app
print("Verifying app argument handling")
assert(run_app(args=['a', 'b', 'c']) == ['a', 'b', 'c'])
# Create app with droppable settings, test opening file
#shutil.rmtree('MyApp.app')
| 0.151686 | 0.187411 |
import os
import matplotlib.pyplot as plt
import numpy as np
import plotly.express as px
import pandas as pd
import seaborn as sns
from dotenv import find_dotenv, load_dotenv
from IPython.core.interactiveshell import InteractiveShell
# Setting styles
InteractiveShell.ast_node_interactivity = "all"
sns.set(style="whitegrid", color_codes=True, rc={"figure.figsize": (12.7, 9.27)})
# %% load data
df = pd.read_csv(os.path.join("data", "processed", "bhci.csv"))
# %%
df.head()
# %%
df.isna().sum()
# %%
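# keep only the all-ethnicities / both-sexes rows so each Place/Year pair
# contributes a single value per indicator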
df_all_eth_sex = df[(df["Race/Ethnicity"] == "All") & (df["Sex"] == "Both")].copy()
df_all_eth_sex.drop(columns=["Race/Ethnicity", "Sex"], inplace=True)
# %%
df_all_eth_sex.isna().sum()
# %%
indicators = [
"AIDS Diagnoses Rate (Per 100,000 people)",
"All Types of Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"All-Cause Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Asthma Emergency Department Visit Rate (Age-Adjusted; Per 10,000)",
"Bike Score",
"Chlamydia Rate (Per 100,000 People)",
"Congenital Syphilis Rate (Per 100,000 Live Births)",
"Diabetes Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Female Breast Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Firearm-Related Emergency Department Visit Rate (Age-Adjusted; Per 10,000 people)",
"Firearm-Related Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Gonorrhea Rate (Per 100,000 People)",
"HIV Diagnoses Rate (Per 100,000 people)",
"HIV-Related Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Heart Disease Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Homicide Rate (Age-Adjusted; Per 100,000 people)",
"Infant Mortality Rate (Per 1,000 live births)",
"Life Expectancy at Birth (Years)",
"Lung Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Median Household Income (Dollars)",
"Motor Vehicle Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Opioid-Related Unintentional Drug Overdose Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Percent Foreign Born",
"Percent Living Below 200% Poverty Level",
"Percent Unemployed",
"Percent Who Only Speak English at Home",
"Percent Who Speak Spanish at Home",
"Percent of 3 and 4 Year Olds Currently Enrolled in Preschool",
"Percent of Adults 65 and Over Who Received Pneumonia Vaccine",
"Percent of Adults Who Are Obese",
"Percent of Adults Who Binge Drank",
"Percent of Adults Who Currently Smoke",
"Percent of Adults Who Meet CDC-Recommended Physical Activity Levels",
"Percent of Adults Who Received Seasonal Flu Shot",
"Percent of Children (Tested) Under Age 6 with Elevated Blood Lead Levels",
"Percent of Children Living in Poverty",
"Percent of Children Who Received Seasonal Flu Shot",
"Percent of High School Graduates (Over Age 18)",
"Percent of High School Students Who Are Obese",
"Percent of High School Students Who Binge Drank",
"Percent of High School Students Who Currently Smoke",
"Percent of High School Students Who Meet CDC-Recommended Physical Activity Levels",
"Percent of Households Whose Housing Costs Exceed 35% of Income",
"Percent of Low Birth Weight Babies Born",
"Percent of Mothers Under Age 20",
"Percent of Population 65 and Over",
"Percent of Population Under 18",
"Percent of Population Uninsured",
"Percent of Population with a Disability",
"Persons Living with HIV/AIDS Rate (Per 100,000 people)",
"Pneumonia and Influenza Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Primary and Secondary Syphilis Rate (Per 100,000 People)",
"Race/Ethnicity (Percent)",
"Rate of Laboratory Confirmed Infections Caused by Salmonella (Per 100,000 people)",
"Rate of Laboratory Confirmed Infections Caused by Shiga Toxin-Producing E-Coli (Per 100,000 people)",
"Sex (Percent)",
"Suicide Rate (Age-Adjusted; Per 100,000 people)",
"Total Population (People)",
"Transit Score",
"Tuberculosis Incidence Rate (Per 100,000 people)",
"Walkability",
]
# %% initial exploration
for indicator in indicators:
sns.lineplot(x="Year", y=indicator, hue="Place", data=df_all_eth_sex)
plt.title(indicator)
plt.show()
# %% Opioids
fig = px.line(
df_all_eth_sex,
x="Year",
y="Opioid-Related Unintentional Drug Overdose Mortality Rate (Age-Adjusted; Per 100,000 people)",
color="Place",
)
fig.show()
# %%
|
notebooks/data_exploraton.py
|
import os
import matplotlib.pyplot as plt
import numpy as np
import plotly.express as px
import pandas as pd
import seaborn as sns
from dotenv import find_dotenv, load_dotenv
from IPython.core.interactiveshell import InteractiveShell
# Setting styles
InteractiveShell.ast_node_interactivity = "all"
sns.set(style="whitegrid", color_codes=True, rc={"figure.figsize": (12.7, 9.27)})
# %% load data
df = pd.read_csv(os.path.join("data", "processed", "bhci.csv"))
# %%
df.head()
# %%
df.isna().sum()
# %%
df_all_eth_sex = df[(df["Race/Ethnicity"] == "All") & (df["Sex"] == "Both")].copy()
df_all_eth_sex.drop(columns=["Race/Ethnicity", "Sex"], inplace=True)
# %%
df_all_eth_sex.isna().sum()
# %%
indicators = [
"AIDS Diagnoses Rate (Per 100,000 people)",
"All Types of Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"All-Cause Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Asthma Emergency Department Visit Rate (Age-Adjusted; Per 10,000)",
"Bike Score",
"Chlamydia Rate (Per 100,000 People)",
"Congenital Syphilis Rate (Per 100,000 Live Births)",
"Diabetes Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Female Breast Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Firearm-Related Emergency Department Visit Rate (Age-Adjusted; Per 10,000 people)",
"Firearm-Related Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Gonorrhea Rate (Per 100,000 People)",
"HIV Diagnoses Rate (Per 100,000 people)",
"HIV-Related Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Heart Disease Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Homicide Rate (Age-Adjusted; Per 100,000 people)",
"Infant Mortality Rate (Per 1,000 live births)",
"Life Expectancy at Birth (Years)",
"Lung Cancer Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Median Household Income (Dollars)",
"Motor Vehicle Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Opioid-Related Unintentional Drug Overdose Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Percent Foreign Born",
"Percent Living Below 200% Poverty Level",
"Percent Unemployed",
"Percent Who Only Speak English at Home",
"Percent Who Speak Spanish at Home",
"Percent of 3 and 4 Year Olds Currently Enrolled in Preschool",
"Percent of Adults 65 and Over Who Received Pneumonia Vaccine",
"Percent of Adults Who Are Obese",
"Percent of Adults Who Binge Drank",
"Percent of Adults Who Currently Smoke",
"Percent of Adults Who Meet CDC-Recommended Physical Activity Levels",
"Percent of Adults Who Received Seasonal Flu Shot",
"Percent of Children (Tested) Under Age 6 with Elevated Blood Lead Levels",
"Percent of Children Living in Poverty",
"Percent of Children Who Received Seasonal Flu Shot",
"Percent of High School Graduates (Over Age 18)",
"Percent of High School Students Who Are Obese",
"Percent of High School Students Who Binge Drank",
"Percent of High School Students Who Currently Smoke",
"Percent of High School Students Who Meet CDC-Recommended Physical Activity Levels",
"Percent of Households Whose Housing Costs Exceed 35% of Income",
"Percent of Low Birth Weight Babies Born",
"Percent of Mothers Under Age 20",
"Percent of Population 65 and Over",
"Percent of Population Under 18",
"Percent of Population Uninsured",
"Percent of Population with a Disability",
"Persons Living with HIV/AIDS Rate (Per 100,000 people)",
"Pneumonia and Influenza Mortality Rate (Age-Adjusted; Per 100,000 people)",
"Primary and Secondary Syphilis Rate (Per 100,000 People)",
"Race/Ethnicity (Percent)",
"Rate of Laboratory Confirmed Infections Caused by Salmonella (Per 100,000 people)",
"Rate of Laboratory Confirmed Infections Caused by Shiga Toxin-Producing E-Coli (Per 100,000 people)",
"Sex (Percent)",
"Suicide Rate (Age-Adjusted; Per 100,000 people)",
"Total Population (People)",
"Transit Score",
"Tuberculosis Incidence Rate (Per 100,000 people)",
"Walkability",
]
# %% initial exploration
for indicator in indicators:
sns.lineplot(x="Year", y=indicator, hue="Place", data=df_all_eth_sex)
plt.title(indicator)
plt.show()
# %% Opioids
fig = px.line(
df_all_eth_sex,
x="Year",
y="Opioid-Related Unintentional Drug Overdose Mortality Rate (Age-Adjusted; Per 100,000 people)",
color="Place",
)
fig.show()
# %%
| 0.474875 | 0.448185 |
from __future__ import division
from past.builtins import cmp
from future import standard_library
standard_library.install_aliases()
from builtins import object
import errno
import logging
import math
import os
import posixpath
import random
import subprocess
import sys
from django.utils.encoding import smart_str
from desktop.lib import i18n
import hadoop.conf
from hadoop.fs import normpath, SEEK_SET, SEEK_CUR, SEEK_END
from hadoop.fs.exceptions import PermissionDeniedException
if sys.version_info[0] > 2:
from django.utils.encoding import force_str
from urllib.parse import urlsplit as lib_urlsplit
from django.utils.translation import gettext as _
else:
from django.utils.encoding import force_unicode as force_str
from urlparse import urlsplit as lib_urlsplit
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)
DEFAULT_USER = "webui"
# The number of bytes to read if not specified
DEFAULT_READ_SIZE = 1024*1024 # 1MB
# The buffer size of the pipe to hdfs -put during upload
WRITE_BUFFER_SIZE = 128*1024 # 128K
# Class that we translate into PermissionDeniedException
HADOOP_ACCESSCONTROLEXCEPTION = "org.apache.hadoop.security.AccessControlException"
# Timeout for thrift calls to NameNode
NN_THRIFT_TIMEOUT = 15
DN_THRIFT_TIMEOUT = 3
# Encoding used by HDFS namespace
HDFS_ENCODING = 'utf-8'
def encode_fs_path(path):
"""encode_fs_path(path) -> byte string in utf8"""
return smart_str(path, HDFS_ENCODING, errors='strict')
def decode_fs_path(path):
"""decode_fs_path(bytestring) -> unicode path"""
return force_str(path, HDFS_ENCODING, errors='strict')
def _coerce_exceptions(function):
"""
Decorator that causes exceptions thrown by the decorated function
to be coerced into generic exceptions from the hadoop.fs.exceptions
module.
"""
def wrapper(*args, **kwargs):
try:
return function(*args, **kwargs)
except Exception as e:
e.msg = force_str(e.msg, errors='replace')
e.stack = force_str(e.stack, errors='replace')
LOG.exception("Exception in Hadoop FS call " + function.__name__)
if e.clazz == HADOOP_ACCESSCONTROLEXCEPTION:
raise PermissionDeniedException(e.msg, e)
else:
raise
return wrapper
class Hdfs(object):
"""
An abstract HDFS proxy
"""
@staticmethod
def basename(path):
return posixpath.basename(path)
@staticmethod
def dirname(path):
return posixpath.dirname(path)
@staticmethod
def split(path):
return posixpath.split(path)
@staticmethod
def join(first, *comp_list):
return posixpath.join(first, *comp_list)
@staticmethod
def abspath(path):
return posixpath.abspath(path)
@staticmethod
def normpath(path):
res = posixpath.normpath(path)
# Python normpath() doesn't eliminate leading double slashes
if res.startswith('//'):
return res[1:]
return res
@staticmethod
def parent_path(path):
return Hdfs.join(path, "..")
@staticmethod
def urlsplit(url):
"""
Take an HDFS path (hdfs://nn:port/foo) or just (/foo) and split it into
the standard urlsplit's 5-tuple.
"""
i = url.find('://')
if i == -1:
# Not found. Treat the entire argument as an HDFS path
return ('hdfs', '', normpath(url), '', '')
schema = url[:i]
if schema not in ('hdfs', 'viewfs'):
# Default to standard for non-hdfs
return lib_urlsplit(url)
url = url[i+3:]
i = url.find('/')
if i == -1:
# Everything is netloc. Assume path is root.
return (schema, url, '/', '', '')
netloc = url[:i]
path = url[i:]
return (schema, netloc, normpath(path), '', '')
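# Illustrative examples of the tuples urlsplit() returns (a sketch; the host
# and paths below are made up):
#   Hdfs.urlsplit('hdfs://namenode:8020/user/foo') -> ('hdfs', 'namenode:8020', '/user/foo', '', '')
#   Hdfs.urlsplit('hdfs://namenode:8020') -> ('hdfs', 'namenode:8020', '/', '', '')
#   Hdfs.urlsplit('/user/foo') -> ('hdfs', '', '/user/foo', '', '')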
def listdir_recursive(self, path, glob=None):
"""
listdir_recursive(path, glob=None) -> [ entry names ]
Get directory entry names without stats, recursively.
"""
paths = [path]
while paths:
path = paths.pop()
if self.isdir(path):
hdfs_paths = self.listdir_stats(path, glob)
paths[:0] = [x.path for x in hdfs_paths]
yield path
def create_home_dir(self, home_path=None):
if home_path is None:
home_path = self.get_home_dir()
from hadoop.hdfs_site import get_umask_mode
from useradmin.conf import HOME_DIR_PERMISSIONS, USE_HOME_DIR_PERMISSIONS
from desktop.conf import DEFAULT_HDFS_SUPERUSER
mode = int(HOME_DIR_PERMISSIONS.get(), 8) if USE_HOME_DIR_PERMISSIONS.get() else (0o777 & (0o1777 ^ get_umask_mode()))
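# e.g. with USE_HOME_DIR_PERMISSIONS off and a typical umask of 0o022 (an
# assumed default), the fallback above works out to 0o777 & (0o1777 ^ 0o022) = 0o755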
if not self.exists(home_path):
user = self.user
LOG.debug('superuser used for home directory creation: %s' % self.superuser)
try:
try:
self.setuser(DEFAULT_HDFS_SUPERUSER.get())
self.mkdir(home_path)
self.chmod(home_path, mode)
self.chown(home_path, user)
try: # Handle the case when there is no group with the same name as the user.
self.chown(home_path, group=user)
except IOError:
LOG.exception('Failed to change the group of "{}" to "{}" when creating a home directory '
'for user "{}"'.format(home_path, user, user))
except IOError:
msg = 'Failed to create home dir ("%s") as superuser %s' % (home_path, self.superuser)
LOG.exception(msg)
raise
finally:
self.setuser(user)
def copyFromLocal(self, local_src, remote_dst, mode=0o755):
remote_dst = remote_dst.endswith(posixpath.sep) and remote_dst[:-1] or remote_dst
local_src = local_src.endswith(posixpath.sep) and local_src[:-1] or local_src
if os.path.isdir(local_src):
self._copy_dir(local_src, remote_dst, mode)
else:
(basename, filename) = os.path.split(local_src)
self._copy_file(local_src, self.isdir(remote_dst) and self.join(remote_dst, filename) or remote_dst)
def _copy_dir(self, local_dir, remote_dir, mode=0o755):
self.mkdir(remote_dir, mode=mode)
for f in os.listdir(local_dir):
local_src = os.path.join(local_dir, f)
remote_dst = self.join(remote_dir, f)
if os.path.isdir(local_src):
self._copy_dir(local_src, remote_dst, mode)
else:
self._copy_file(local_src, remote_dst)
def _copy_file(self, local_src, remote_dst, chunk_size=1024 * 1024 * 64):
if os.path.isfile(local_src):
if self.exists(remote_dst):
LOG.info(_('%(remote_dst)s already exists. Skipping.') % {'remote_dst': remote_dst})
return
else:
LOG.info(_('%(remote_dst)s does not exist. Trying to copy.') % {'remote_dst': remote_dst})
src = open(local_src, 'rb')
try:
try:
self.create(remote_dst, permission=0o755)
chunk = src.read(chunk_size)
while chunk:
self.append(remote_dst, chunk)
chunk = src.read(chunk_size)
LOG.info(_('Copied %s -> %s.') % (local_src, remote_dst))
except:
LOG.exception(_('Copying %s -> %s failed.') % (local_src, remote_dst))
raise
finally:
src.close()
else:
LOG.info(_('Skipping %s (not a file).') % local_src)
@_coerce_exceptions
def mktemp(self, subdir='', prefix='tmp', basedir=None):
"""
mktemp(prefix) -> <temp_dir or basedir>/<subdir>/prefix.<rand>
Return a unique temporary filename with prefix in the cluster's temp dir.
"""
RANDOM_BITS = 64
base = self.join(basedir or self._temp_dir, subdir)
if not self.isdir(base):
self.mkdir(base)
while True:
name = prefix + '.' + str(random.getrandbits(RANDOM_BITS))
candidate = self.join(base, name)
if not self.exists(candidate):
return candidate
def mkswap(self, filename, subdir='', suffix='swp', basedir=None):
"""
mkswap(filename, suffix) -> <temp_dir or basedir>/<subdir>/filename.<suffix>
Return the swap-file path for the given filename in the cluster's temp dir.
"""
RANDOM_BITS = 64
base = self.join(basedir or self._temp_dir, subdir)
if not self.isdir(base):
self.mkdir(base)
candidate = self.join(base, "%s.%s" % (filename, suffix))
return candidate
def exists(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'exists'})
def do_as_user(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'do_as_user'})
def create(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'exists'})
def append(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'append'})
def mkdir(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'mkdir'})
def isdir(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'isdir'})
def listdir_stats(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'listdir_stats'})
def require_open(func):
"""
Decorator that ensures that the file instance isn't closed when the
function is run.
"""
def wrapper(self, *args, **kwargs):
if self.closed:
raise IOError(errno.EBADF, "I/O operation on closed file")
return func(self, *args, **kwargs)
return wrapper
class File(object):
""" Represents an open file on HDFS. """
def __init__(self, fs, path, mode="r", buffering=False):
self.fs = fs
self.path = normpath(path)
self.pos = 0
self.closed = False
self._block_cache = BlockCache()
if buffering or mode != "r":
raise Exception("buffering and write support not yet implemented") # NYI
stat = self._stat()
if stat is None:
raise IOError(errno.ENOENT, "No such file or directory: '%s'" % path)
if stat.isDir:
raise IOError(errno.EISDIR, "Is a directory: '%s'" % path)
#TODO(todd) somehow we need to check permissions here - maybe we need an access() call?
# Minimal context manager implementation.
# See: http://www.python.org/doc/2.5.2/lib/typecontextmanager.html
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False # don't suppress exceptions.
@require_open
def seek(self, offset, whence=0):
""" Set the file pointer to the given spot. @see file.seek """
if whence == SEEK_SET:
self.pos = offset
elif whence == SEEK_CUR:
self.pos += offset
elif whence == SEEK_END:
self.pos = self._stat().length + offset
else:
raise IOError(errno.EINVAL, "Invalid argument to seek for whence")
@require_open
def tell(self):
return self.pos
def _get_block(self, pos):
"""Return the Block instance that contains the given offset"""
cached_block = self._block_cache.find_block(pos)
if cached_block:
return cached_block
# Cache "miss" - fetch ahead 500MB worth of blocks
new_blocks = self.fs._get_blocks(self.path, pos, 500*1024*1024)
self._block_cache.insert_new_blocks(new_blocks)
result = self._block_cache.find_block(pos)
if not result:
raise IOError("No block for position %d in file %s" % (pos, self.path))
return result
@require_open
def _read_in_block(self, length=DEFAULT_READ_SIZE):
"""
Tries to read up to length bytes, but will often read fewer, since
a single call will not read across a block boundary.
"""
end_pos = min(self.pos + length, self._stat().length)
# If we're at EOF, return empty string
if end_pos == self.pos:
return ""
block = self._get_block(self.pos)
assert _block_contains_pos(block, self.pos)
assert block.path == self.path
in_block_pos = self.pos - block.startOffset
assert in_block_pos >= 0
in_block_len = min(length, block.numBytes - in_block_pos)
result = self.fs._read_block(block, in_block_pos, in_block_len)
self.pos += len(result)
assert self.pos <= end_pos
return result
@require_open
def read(self, length=DEFAULT_READ_SIZE):
"""
Read the given number of bytes from this file.
If EOF has been reached, returns the empty string.
@param length the number of bytes wanted
"""
result = []
read_so_far = 0
while read_so_far < length:
this_data = self._read_in_block(length - read_so_far)
if this_data == "": # eof
break
read_so_far += len(this_data)
result.append(this_data)
return "".join(result)
def close(self):
self.closed = True
def _stat(self):
if not hasattr(self, "_stat_cache"):
self._stat_cache = self.fs._hadoop_stat(self.path)
return self._stat_cache
class FileUpload(object):
"""A write-only file that supports no seeking and cannot exist prior to
opening.
"""
def __init__(self, fs, path, mode="w", block_size=None):
self.fs = fs
self.closed = False
assert mode == "w"
extra_confs = []
if block_size:
extra_confs.append("-Ddfs.block.size=%d" % block_size)
self.subprocess_cmd = [self.fs.hadoop_bin_path,
"jar",
hadoop.conf.SUDO_SHELL_JAR.get(),
self.fs.user,
"-Dfs.default.name=" + self.fs.uri] + \
extra_confs + \
["-put", "-", encode_fs_path(path)]
self.subprocess_env = i18n.make_utf8_env()
if 'HADOOP_CLASSPATH' in self.subprocess_env:
self.subprocess_env['HADOOP_CLASSPATH'] += ':' + hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
else:
self.subprocess_env['HADOOP_CLASSPATH'] = hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
if hadoop.conf.HADOOP_CONF_DIR.get():
self.subprocess_env['HADOOP_CONF_DIR'] = hadoop.conf.HADOOP_CONF_DIR.get()
self.path = path
self.putter = subprocess.Popen(self.subprocess_cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True,
env=self.subprocess_env,
bufsize=WRITE_BUFFER_SIZE)
@require_open
def write(self, data):
"""May raise IOError, particularly EPIPE"""
self.putter.stdin.write(data)
@require_open
def close(self):
stdout, stderr = None, None
try:
(stdout, stderr) = self.putter.communicate()
except IOError as ioe:
logging.debug("Saw IOError writing %r" % self.path, exc_info=1)
if ioe.errno == errno.EPIPE:
stdout, stderr = self.putter.communicate()
self.closed = True
if stderr:
LOG.warning("HDFS FileUpload (cmd='%s', env='%s') outputted stderr:\n%s" %
(repr(self.subprocess_cmd), repr(self.subprocess_env), stderr))
if stdout:
LOG.info("HDFS FileUpload (cmd='%s', env='%s') outputted stdout:\n%s" %
(repr(self.subprocess_cmd), repr(self.subprocess_env), stdout))
if self.putter.returncode != 0:
raise IOError("hdfs put returned bad code: %d\nstderr: %s" %
(self.putter.returncode, stderr))
LOG.info("Completed upload: %s" % repr(self.subprocess_cmd))
@require_open
def flush(self):
self.putter.stdin.flush()
def _block_contains_pos(block, pos):
return pos >= block.startOffset and pos < block.startOffset + block.numBytes
class BlockCache(object):
"""
A cache of block locations used by a single HDFS input file.
Essentially this keeps the blocks in sorted order and does
binary search to find the block that contains a given offset.
It also provides the ability to merge in the response of a NN
getBlocks response to the cache.
"""
def __init__(self):
self.blocks = []
def find_block(self, pos, _min_idx=0, _max_idx=None):
"""
Return the Block object that contains the specified
position pos, or None if it is not in the cache.
"""
if _max_idx is None:
_max_idx = len(self.blocks) - 1
if _max_idx < _min_idx:
return None
pivot_idx = (_max_idx + _min_idx) // 2
pivot_block = self.blocks[pivot_idx]
if pos < pivot_block.startOffset:
return self.find_block(pos, _min_idx, pivot_idx - 1)
elif pos >= pivot_block.startOffset + pivot_block.numBytes:
return self.find_block(pos, pivot_idx + 1, _max_idx)
else:
return pivot_block
def insert_new_blocks(self, new_blocks):
"""
Merge a list of Block objects from the NN into the list
of cached blocks.
If the set of blocks overlaps, the new blocks take precedence.
"""
# We could do a more efficient merge here since both lists
# are already sorted, but these data structures are small, so let's
# do the easy thing.
blocks_dict = dict((b.blockId, b) for b in self.blocks)
# Merge in new data to dictionary
for nb in new_blocks:
blocks_dict[nb.blockId] = nb
# Convert back to sorted list
block_list = list(blocks_dict.values())
block_list.sort(key=lambda b: b.startOffset)
# Update cache with new data
self.blocks = block_list
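# A minimal usage sketch of BlockCache (illustrative only; real Block objects
# come from the NameNode, so a namedtuple with the same attribute names is
# assumed here as a stand-in):
#
# from collections import namedtuple
# FakeBlock = namedtuple('FakeBlock', ['blockId', 'startOffset', 'numBytes'])
# cache = BlockCache()
# cache.insert_new_blocks([FakeBlock(1, 0, 64), FakeBlock(2, 64, 64)])
# assert cache.find_block(100).blockId == 2 # falls inside the second block
# assert cache.find_block(200) is None # past the cached range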
|
desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py
|
from __future__ import division
from past.builtins import cmp
from future import standard_library
standard_library.install_aliases()
from builtins import object
import errno
import logging
import math
import os
import posixpath
import random
import subprocess
import sys
from django.utils.encoding import smart_str
from desktop.lib import i18n
import hadoop.conf
from hadoop.fs import normpath, SEEK_SET, SEEK_CUR, SEEK_END
from hadoop.fs.exceptions import PermissionDeniedException
if sys.version_info[0] > 2:
from django.utils.encoding import force_str
from urllib.parse import urlsplit as lib_urlsplit
from django.utils.translation import gettext as _
else:
from django.utils.encoding import force_unicode as force_str
from urlparse import urlsplit as lib_urlsplit
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)
DEFAULT_USER = "webui"
# The number of bytes to read if not specified
DEFAULT_READ_SIZE = 1024*1024 # 1MB
# The buffer size of the pipe to hdfs -put during upload
WRITE_BUFFER_SIZE = 128*1024 # 128K
# Class that we translate into PermissionDeniedException
HADOOP_ACCESSCONTROLEXCEPTION = "org.apache.hadoop.security.AccessControlException"
# Timeout for thrift calls to NameNode
NN_THRIFT_TIMEOUT = 15
DN_THRIFT_TIMEOUT = 3
# Encoding used by HDFS namespace
HDFS_ENCODING = 'utf-8'
def encode_fs_path(path):
"""encode_fs_path(path) -> byte string in utf8"""
return smart_str(path, HDFS_ENCODING, errors='strict')
def decode_fs_path(path):
"""decode_fs_path(bytestring) -> unicode path"""
return force_str(path, HDFS_ENCODING, errors='strict')
def _coerce_exceptions(function):
"""
Decorator that causes exceptions thrown by the decorated function
to be coerced into generic exceptions from the hadoop.fs.exceptions
module.
"""
def wrapper(*args, **kwargs):
try:
return function(*args, **kwargs)
except Exception as e:
e.msg = force_str(e.msg, errors='replace')
e.stack = force_str(e.stack, errors='replace')
LOG.exception("Exception in Hadoop FS call " + function.__name__)
if e.clazz == HADOOP_ACCESSCONTROLEXCEPTION:
raise PermissionDeniedException(e.msg, e)
else:
raise
return wrapper
class Hdfs(object):
"""
An abstract HDFS proxy
"""
@staticmethod
def basename(path):
return posixpath.basename(path)
@staticmethod
def dirname(path):
return posixpath.dirname(path)
@staticmethod
def split(path):
return posixpath.split(path)
@staticmethod
def join(first, *comp_list):
return posixpath.join(first, *comp_list)
@staticmethod
def abspath(path):
return posixpath.abspath(path)
@staticmethod
def normpath(path):
res = posixpath.normpath(path)
# Python normpath() doesn't eliminate leading double slashes
if res.startswith('//'):
return res[1:]
return res
@staticmethod
def parent_path(path):
return Hdfs.join(path, "..")
@staticmethod
def urlsplit(url):
"""
Take an HDFS path (hdfs://nn:port/foo) or just (/foo) and split it into
the standard urlsplit's 5-tuple.
"""
i = url.find('://')
if i == -1:
# Not found. Treat the entire argument as an HDFS path
return ('hdfs', '', normpath(url), '', '')
schema = url[:i]
if schema not in ('hdfs', 'viewfs'):
# Default to standard for non-hdfs
return lib_urlsplit(url)
url = url[i+3:]
i = url.find('/')
if i == -1:
# Everything is netloc. Assume path is root.
return (schema, url, '/', '', '')
netloc = url[:i]
path = url[i:]
return (schema, netloc, normpath(path), '', '')
def listdir_recursive(self, path, glob=None):
"""
listdir_recursive(path, glob=None) -> [ entry names ]
Get directory entry names without stats, recursively.
"""
paths = [path]
while paths:
path = paths.pop()
if self.isdir(path):
hdfs_paths = self.listdir_stats(path, glob)
paths[:0] = [x.path for x in hdfs_paths]
yield path
def create_home_dir(self, home_path=None):
if home_path is None:
home_path = self.get_home_dir()
from hadoop.hdfs_site import get_umask_mode
from useradmin.conf import HOME_DIR_PERMISSIONS, USE_HOME_DIR_PERMISSIONS
from desktop.conf import DEFAULT_HDFS_SUPERUSER
mode = int(HOME_DIR_PERMISSIONS.get(), 8) if USE_HOME_DIR_PERMISSIONS.get() else (0o777 & (0o1777 ^ get_umask_mode()))
if not self.exists(home_path):
user = self.user
LOG.debug('superuser used for home directory creation: %s' % self.superuser)
try:
try:
self.setuser(DEFAULT_HDFS_SUPERUSER.get())
self.mkdir(home_path)
self.chmod(home_path, mode)
self.chown(home_path, user)
try: # Handle the case when there is no group with the same name as the user.
self.chown(home_path, group=user)
except IOError:
LOG.exception('Failed to change the group of "{}" to "{}" when creating a home directory '
'for user "{}"'.format(home_path, user, user))
except IOError:
msg = 'Failed to create home dir ("%s") as superuser %s' % (home_path, self.superuser)
LOG.exception(msg)
raise
finally:
self.setuser(user)
def copyFromLocal(self, local_src, remote_dst, mode=0o755):
remote_dst = remote_dst.endswith(posixpath.sep) and remote_dst[:-1] or remote_dst
local_src = local_src.endswith(posixpath.sep) and local_src[:-1] or local_src
if os.path.isdir(local_src):
self._copy_dir(local_src, remote_dst, mode)
else:
(basename, filename) = os.path.split(local_src)
self._copy_file(local_src, self.isdir(remote_dst) and self.join(remote_dst, filename) or remote_dst)
def _copy_dir(self, local_dir, remote_dir, mode=0o755):
self.mkdir(remote_dir, mode=mode)
for f in os.listdir(local_dir):
local_src = os.path.join(local_dir, f)
remote_dst = self.join(remote_dir, f)
if os.path.isdir(local_src):
self._copy_dir(local_src, remote_dst, mode)
else:
self._copy_file(local_src, remote_dst)
def _copy_file(self, local_src, remote_dst, chunk_size=1024 * 1024 * 64):
if os.path.isfile(local_src):
if self.exists(remote_dst):
LOG.info(_('%(remote_dst)s already exists. Skipping.') % {'remote_dst': remote_dst})
return
else:
LOG.info(_('%(remote_dst)s does not exist. Trying to copy.') % {'remote_dst': remote_dst})
src = open(local_src, 'rb')
try:
try:
self.create(remote_dst, permission=0o755)
chunk = src.read(chunk_size)
while chunk:
self.append(remote_dst, chunk)
chunk = src.read(chunk_size)
LOG.info(_('Copied %s -> %s.') % (local_src, remote_dst))
except:
LOG.exception(_('Copying %s -> %s failed.') % (local_src, remote_dst))
raise
finally:
src.close()
else:
LOG.info(_('Skipping %s (not a file).') % local_src)
@_coerce_exceptions
def mktemp(self, subdir='', prefix='tmp', basedir=None):
"""
mktemp(prefix) -> <temp_dir or basedir>/<subdir>/prefix.<rand>
Return a unique temporary filename with prefix in the cluster's temp dir.
"""
RANDOM_BITS = 64
base = self.join(basedir or self._temp_dir, subdir)
if not self.isdir(base):
self.mkdir(base)
while True:
name = prefix + '.' + str(random.getrandbits(RANDOM_BITS))
candidate = self.join(base, name)
if not self.exists(candidate):
return candidate
def mkswap(self, filename, subdir='', suffix='swp', basedir=None):
"""
mkswap(filename, suffix) -> <temp_dir or basedir>/<subdir>/filename.<suffix>
Return the swap-file path for the given filename in the cluster's temp dir.
"""
RANDOM_BITS = 64
base = self.join(basedir or self._temp_dir, subdir)
if not self.isdir(base):
self.mkdir(base)
candidate = self.join(base, "%s.%s" % (filename, suffix))
return candidate
def exists(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'exists'})
def do_as_user(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'do_as_user'})
def create(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'exists'})
def append(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'append'})
def mkdir(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'mkdir'})
def isdir(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'isdir'})
def listdir_stats(self):
raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'listdir_stats'})
def require_open(func):
"""
Decorator that ensures that the file instance isn't closed when the
function is run.
"""
def wrapper(self, *args, **kwargs):
if self.closed:
raise IOError(errno.EBADF, "I/O operation on closed file")
return func(self, *args, **kwargs)
return wrapper
class File(object):
""" Represents an open file on HDFS. """
def __init__(self, fs, path, mode="r", buffering=False):
self.fs = fs
self.path = normpath(path)
self.pos = 0
self.closed = False
self._block_cache = BlockCache()
if buffering or mode != "r":
raise Exception("buffering and write support not yet implemented") # NYI
stat = self._stat()
if stat is None:
raise IOError(errno.ENOENT, "No such file or directory: '%s'" % path)
if stat.isDir:
raise IOError(errno.EISDIR, "Is a directory: '%s'" % path)
#TODO(todd) somehow we need to check permissions here - maybe we need an access() call?
# Minimal context manager implementation.
# See: http://www.python.org/doc/2.5.2/lib/typecontextmanager.html
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False # don't suppress exceptions.
@require_open
def seek(self, offset, whence=0):
""" Set the file pointer to the given spot. @see file.seek """
if whence == SEEK_SET:
self.pos = offset
elif whence == SEEK_CUR:
self.pos += offset
elif whence == SEEK_END:
self.pos = self._stat().length + offset
else:
raise IOError(errno.EINVAL, "Invalid argument to seek for whence")
@require_open
def tell(self):
return self.pos
def _get_block(self, pos):
"""Return the Block instance that contains the given offset"""
cached_block = self._block_cache.find_block(pos)
if cached_block:
return cached_block
# Cache "miss" - fetch ahead 500MB worth of blocks
new_blocks = self.fs._get_blocks(self.path, pos, 500*1024*1024)
self._block_cache.insert_new_blocks(new_blocks)
result = self._block_cache.find_block(pos)
if not result:
raise IOError("No block for position %d in file %s" % (pos, self.path))
return result
@require_open
def _read_in_block(self, length=DEFAULT_READ_SIZE):
"""
Tries to read up to length bytes, but will often read fewer, since
a single call will not read across a block boundary.
"""
end_pos = min(self.pos + length, self._stat().length)
# If we're at EOF, return empty string
if end_pos == self.pos:
return ""
block = self._get_block(self.pos)
assert _block_contains_pos(block, self.pos)
assert block.path == self.path
in_block_pos = self.pos - block.startOffset
assert in_block_pos >= 0
in_block_len = min(length, block.numBytes - in_block_pos)
result = self.fs._read_block(block, in_block_pos, in_block_len)
self.pos += len(result)
assert self.pos <= end_pos
return result
@require_open
def read(self, length=DEFAULT_READ_SIZE):
"""
Read the given number of bytes from this file.
If EOF has been reached, returns the empty string.
@param length the number of bytes wanted
"""
result = []
read_so_far = 0
while read_so_far < length:
this_data = self._read_in_block(length - read_so_far)
if this_data == "": # eof
break
read_so_far += len(this_data)
result.append(this_data)
return "".join(result)
def close(self):
self.closed = True
def _stat(self):
if not hasattr(self, "_stat_cache"):
self._stat_cache = self.fs._hadoop_stat(self.path)
return self._stat_cache
class FileUpload(object):
"""A write-only file that supports no seeking and cannot exist prior to
opening.
"""
def __init__(self, fs, path, mode="w", block_size=None):
self.fs = fs
self.closed = False
assert mode == "w"
extra_confs = []
if block_size:
extra_confs.append("-Ddfs.block.size=%d" % block_size)
self.subprocess_cmd = [self.fs.hadoop_bin_path,
"jar",
hadoop.conf.SUDO_SHELL_JAR.get(),
self.fs.user,
"-Dfs.default.name=" + self.fs.uri] + \
extra_confs + \
["-put", "-", encode_fs_path(path)]
self.subprocess_env = i18n.make_utf8_env()
if 'HADOOP_CLASSPATH' in self.subprocess_env:
self.subprocess_env['HADOOP_CLASSPATH'] += ':' + hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
else:
self.subprocess_env['HADOOP_CLASSPATH'] = hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
if hadoop.conf.HADOOP_CONF_DIR.get():
self.subprocess_env['HADOOP_CONF_DIR'] = hadoop.conf.HADOOP_CONF_DIR.get()
self.path = path
self.putter = subprocess.Popen(self.subprocess_cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True,
env=self.subprocess_env,
bufsize=WRITE_BUFFER_SIZE)
@require_open
def write(self, data):
"""May raise IOError, particularly EPIPE"""
self.putter.stdin.write(data)
@require_open
def close(self):
stdout, stderr = None, None
try:
(stdout, stderr) = self.putter.communicate()
except IOError as ioe:
logging.debug("Saw IOError writing %r" % self.path, exc_info=1)
if ioe.errno == errno.EPIPE:
stdout, stderr = self.putter.communicate()
self.closed = True
if stderr:
LOG.warning("HDFS FileUpload (cmd='%s', env='%s') outputted stderr:\n%s" %
(repr(self.subprocess_cmd), repr(self.subprocess_env), stderr))
if stdout:
LOG.info("HDFS FileUpload (cmd='%s', env='%s') outputted stdout:\n%s" %
(repr(self.subprocess_cmd), repr(self.subprocess_env), stdout))
if self.putter.returncode != 0:
raise IOError("hdfs put returned bad code: %d\nstderr: %s" %
(self.putter.returncode, stderr))
LOG.info("Completed upload: %s" % repr(self.subprocess_cmd))
@require_open
def flush(self):
self.putter.stdin.flush()
def _block_contains_pos(block, pos):
return pos >= block.startOffset and pos < block.startOffset + block.numBytes
class BlockCache(object):
"""
A cache of block locations used by a single HDFS input file.
Essentially this keeps the blocks in sorted order and does
binary search to find the block that contains a given offset.
It also provides the ability to merge in the response of a NN
getBlocks response to the cache.
"""
def __init__(self):
self.blocks = []
def find_block(self, pos, _min_idx=0, _max_idx=None):
"""
Return the Block object that contains the specified
position pos, or None if it is not in the cache.
"""
if _max_idx is None:
_max_idx = len(self.blocks) - 1
if _max_idx < _min_idx:
return None
pivot_idx = (_max_idx + _min_idx) // 2
pivot_block = self.blocks[pivot_idx]
if pos < pivot_block.startOffset:
return self.find_block(pos, _min_idx, pivot_idx - 1)
elif pos >= pivot_block.startOffset + pivot_block.numBytes:
return self.find_block(pos, pivot_idx + 1, _max_idx)
else:
return pivot_block
def insert_new_blocks(self, new_blocks):
"""
Merge a list of Block objects from the NN into the list
of cached blocks.
If the set of blocks overlaps, the new blocks take precedence.
"""
# We could do a more efficient merge here since both lists
# are already sorted, but these data structures are small, so let's
# do the easy thing.
blocks_dict = dict((b.blockId, b) for b in self.blocks)
# Merge in new data to dictionary
for nb in new_blocks:
blocks_dict[nb.blockId] = nb
# Convert back to sorted list
block_list = list(blocks_dict.values())
block_list.sort(key=lambda b: b.startOffset)
# Update cache with new data
self.blocks = block_list
| 0.361503 | 0.094887 |
import asyncio, base64, discord, requests, time, traceback
from utils.datautils import config, set_client
from utils.discordbot import BotClient, send
client = None
class TimerClient(BotClient):
def __init__(self):
BotClient.__init__(self, "")
self.name = "timer"
client = TimerClient()
timers = {}
@client.command("Timer Commands", ["-start"], "-start", "Start a 5-minute BP timer; 30 seconds protected time, 15 seconds grace.")
async def command_start(command, message):
cid = message.channel.id
mid = message.id
timers[cid] = timers.get(cid, []) + [mid]
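# the message id stored above doubles as a cancellation token: -stop clears
# timers[cid], so every later "if mid in timers[cid]" check fails and the
# remaining announcements are skipped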
replies = []
replies.append(await send(message, "5-minute timer started!", reaction = "check"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "Protected time over!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "4 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "3 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "2 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "1 minute remaining!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "Protected time!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "15-second grace period!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "10 seconds!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "5 seconds!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "Time's Up!"))
await asyncio.sleep(5)
if mid in timers[cid]: await send(message, "[a 5-minute timer was run]")
if mid in timers[cid]: await message.channel.delete_messages(replies)
@client.command("Timer Commands", ["-stop"], "-stop", "Stops all timers in this channel.")
async def command_stop(command, message):
timers[message.channel.id] = []
await send(message, "All timers in this channel stopped!", reaction = "check")
set_client(client)
|
src/bots/timer/main.py
|
import asyncio, base64, discord, requests, time, traceback
from utils.datautils import config, set_client
from utils.discordbot import BotClient, send
client = None
class TimerClient(BotClient):
def __init__(self):
BotClient.__init__(self, "")
self.name = "timer"
client = TimerClient()
timers = {}
@client.command("Timer Commands", ["-start"], "-start", "Start a 5-minute BP timer; 30 seconds protected time, 15 seconds grace.")
async def command_start(command, message):
cid = message.channel.id
mid = message.id
timers[cid] = timers.get(cid, []) + [mid]
replies = []
replies.append(await send(message, "5-minute timer started!", reaction = "check"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "Protected time over!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "4 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "3 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "2 minutes remaining!"))
await asyncio.sleep(60)
if mid in timers[cid]: replies.append(await send(message, "1 minute remaining!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "Protected time!"))
await asyncio.sleep(30)
if mid in timers[cid]: replies.append(await send(message, "15-second grace period!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "10 seconds!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "5 seconds!"))
await asyncio.sleep(5)
if mid in timers[cid]: replies.append(await send(message, "Time's Up!"))
await asyncio.sleep(5)
if mid in timers[cid]: await send(message, "[a 5-minute timer was run]")
if mid in timers[cid]: await message.channel.delete_messages(replies)
@client.command("Timer Commands", ["-stop"], "-stop", "Stops all timers in this channel.")
async def command_stop(command, message):
timers[message.channel.id] = []
await send(message, "All timers in this channel stopped!", reaction = "check")
set_client(client)
| 0.188847 | 0.058426 |
from dataclasses import dataclass
from typing import Final
from jupiter.domain.adate import ADate
from jupiter.domain.entity_name import EntityName
from jupiter.domain.storage_engine import StorageEngine
from jupiter.domain.vacations.infra.vacation_notion_manager import VacationNotionManager
from jupiter.framework.base.entity_id import EntityId
from jupiter.framework.update_action import UpdateAction
from jupiter.framework.use_case import UseCase
from jupiter.utils.time_provider import TimeProvider
class VacationUpdateUseCase(UseCase['VacationUpdateUseCase.Args', None]):
"""The command for updating a vacation's properties."""
@dataclass()
class Args:
"""Args."""
ref_id: EntityId
name: UpdateAction[EntityName]
start_date: UpdateAction[ADate]
end_date: UpdateAction[ADate]
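# Each UpdateAction field above wraps an optional new value; execute() below
# only applies the ones whose should_change flag is set.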
_time_provider: Final[TimeProvider]
_storage_engine: Final[StorageEngine]
_vacation_notion_manager: Final[VacationNotionManager]
def __init__(
self, time_provider: TimeProvider, storage_engine: StorageEngine,
notion_manager: VacationNotionManager) -> None:
"""Constructor."""
self._time_provider = time_provider
self._storage_engine = storage_engine
self._vacation_notion_manager = notion_manager
def execute(self, args: Args) -> None:
"""Execute the command's action."""
with self._storage_engine.get_unit_of_work() as uow:
vacation = uow.vacation_repository.load_by_id(args.ref_id)
if args.name.should_change:
vacation.change_name(args.name.value, self._time_provider.get_current_time())
if args.start_date.should_change:
vacation.change_start_date(args.start_date.value, self._time_provider.get_current_time())
if args.end_date.should_change:
vacation.change_end_date(args.end_date.value, self._time_provider.get_current_time())
uow.vacation_repository.save(vacation)
self._vacation_notion_manager.upsert_vacation(vacation)
|
jupiter/use_cases/vacations/update.py
|
from dataclasses import dataclass
from typing import Final
from jupiter.domain.adate import ADate
from jupiter.domain.entity_name import EntityName
from jupiter.domain.storage_engine import StorageEngine
from jupiter.domain.vacations.infra.vacation_notion_manager import VacationNotionManager
from jupiter.framework.base.entity_id import EntityId
from jupiter.framework.update_action import UpdateAction
from jupiter.framework.use_case import UseCase
from jupiter.utils.time_provider import TimeProvider
class VacationUpdateUseCase(UseCase['VacationUpdateUseCase.Args', None]):
"""The command for updating a vacation's properties."""
@dataclass()
class Args:
"""Args."""
ref_id: EntityId
name: UpdateAction[EntityName]
start_date: UpdateAction[ADate]
end_date: UpdateAction[ADate]
_time_provider: Final[TimeProvider]
_storage_engine: Final[StorageEngine]
_vacation_notion_manager: Final[VacationNotionManager]
def __init__(
self, time_provider: TimeProvider, storage_engine: StorageEngine,
notion_manager: VacationNotionManager) -> None:
"""Constructor."""
self._time_provider = time_provider
self._storage_engine = storage_engine
self._vacation_notion_manager = notion_manager
def execute(self, args: Args) -> None:
"""Execute the command's action."""
with self._storage_engine.get_unit_of_work() as uow:
vacation = uow.vacation_repository.load_by_id(args.ref_id)
if args.name.should_change:
vacation.change_name(args.name.value, self._time_provider.get_current_time())
if args.start_date.should_change:
vacation.change_start_date(args.start_date.value, self._time_provider.get_current_time())
if args.end_date.should_change:
vacation.change_end_date(args.end_date.value, self._time_provider.get_current_time())
uow.vacation_repository.save(vacation)
self._vacation_notion_manager.upsert_vacation(vacation)
| 0.904068 | 0.123736 |
from django.shortcuts import render
from django.http import JsonResponse
import os
import json
import time
from .api import GoogleAPI
from threpose.settings import BASE_DIR
from src.caching.caching_gmap import APICaching
from decouple import config
gapi = GoogleAPI()
api_caching = APICaching()
PLACE_IMG_PATH = os.path.join(BASE_DIR, 'media', 'places_image')
# Place List page
def get_next_page_from_token(request): # pragma: no cover
"""Get places list data by next_page_token."""
# Check request
if request.method != 'POST':
return JsonResponse({"status": "INVALID METHOD"})
if 'token' not in request.POST:
return JsonResponse({"status": "INVALID PAYLOAD"})
# Get next page token from request
token = request.POST['token']
context = []
# Check next_page cache
if api_caching.get(f'{token[:30]}') is None:
for _ in range(6):
# Request the data up to 6 times; if the response is never OK
# after the retries are exhausted, an empty list is returned
data = json.loads(gapi.next_search_nearby(token))
if data['status'] == "OK":
context = restruct_nearby_place(data['results'])
break
time.sleep(0.2)
# write cache file
byte_context = json.dumps({"cache": context, "status": "OK"}, indent=3).encode()
api_caching.add(f'{token[:30]}', byte_context)
if len(context) > 0:
return JsonResponse({"places": context, "status": "OK"})
return JsonResponse({"places": context, "status": "NOT FOUND"})
else: # Have cache
# load cache
context = json.loads(api_caching.get(f'{token[:30]}'))
# check place images
context = check_downloaded_image(context['cache'])
return JsonResponse({"places": context, "status": "OK"})
def place_list(request, *args, **kwargs): # pragma: no cover
"""Place_list view for list place that nearby the user search input."""
data = request.GET # get lat and lng from url
# Our default search type
types = ['restaurant', 'zoo', 'tourist_attraction', 'museum', 'cafe', 'aquarium']
lat = data['lat']
lng = data['lng']
# Get place cache
if api_caching.get(f'{lat}{lng}searchresult'):
# data exists
data = json.loads(api_caching.get(f'{lat}{lng}searchresult'))
context = data['cache']
token = data['next_page_token']
else:
# data not exist
context, token = get_new_context(types, lat, lng)
context = check_downloaded_image(context)
# place images are matched against the files in media/places_image
api_key = config('FRONTEND_API_KEY')
return render(request, "search/place_list.html", {'places': context, 'all_token': token, 'api_key': api_key})
# Helper function
def get_new_context(types: list, lat: int, lng: int) -> tuple: # pragma: no cover
"""Fetch nearby places for each type, cache them and return the cached data.
Args:
types: place types to search for
lat, lng: latitude and longitude of the user's search location
Returns:
context: places nearby data
token: next page token per place type
"""
token = {}
# This list accumulates the places returned by each nearby search
tempo_context = []
for type in types:
data = json.loads(gapi.search_nearby(lat, lng, type))
if 'next_page_token' in data:
token[type] = data['next_page_token']
places = data['results']
restructed_places = restruct_nearby_place(places)
tempo_context = add_more_place(tempo_context, restructed_places)
# Caching places nearby
cache = {'cache': tempo_context, 'next_page_token': token}
api_caching.add(f'{lat}{lng}searchresult', json.dumps(cache, indent=3).encode())
# Load data from cache
context = json.loads(api_caching.get(f'{lat}{lng}searchresult'))['cache']
return context, token
def restruct_nearby_place(places: list) -> list:
"""Process data for frontend
Args:
places: nearby-place data from the Google Maps API.
Returns:
context: the place data that the place-list page needs.
Data struct:
[
{
# Essential key
'place_name': <name>,
'place_id': <place_id>,
'photo_ref': [<photo_ref>],
'types': [],
# other...
}
. . .
]
"""
context = []
for place in places:
init_place = {
'place_name': None,
'place_id': None,
'photo_ref': [],
'types': [],
}
if 'photos' in place:
# Place has an image
photo_ref = place['photos'][0]['photo_reference']
init_place['photo_ref'].append(photo_ref)
else:
# Place has no image; skip it
continue
init_place['place_name'] = place['name']
init_place['place_id'] = place['place_id']
init_place['types'] = place['types']
context.append(init_place)
return context
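# For example (illustrative values), a raw API result such as
# {'name': 'Cafe X', 'place_id': 'abc123', 'types': ['cafe'], 'photos': [{'photo_reference': 'ref1'}]}
# is reduced to
# {'place_name': 'Cafe X', 'place_id': 'abc123', 'photo_ref': ['ref1'], 'types': ['cafe']}
# while results without a 'photos' entry are dropped entirely.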
def check_downloaded_image(context: list) -> list:
"""Check that image from static/images/place_image that is ready for frontend to display or not
Args:
context: place nearby data
Returns:
context: place nearby data with telling the image of each place were downloaded or not
"""
# Check places_image dir that is exists
if os.path.exists(PLACE_IMG_PATH):
# Get image file name from static/images/places_image
all_img_file = [f for f in os.listdir(PLACE_IMG_PATH)
if os.path.isfile(os.path.join(PLACE_IMG_PATH, f))]
for place in context:
# A place with a photo_ref entry has an image that may need downloading
if 'photo_ref' in place:
place_id = place['place_id']
downloaded_img = f'{place_id}photo.jpeg' in all_img_file
have_image = len(place['photo_ref']) == 0
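# have_image is True when the place carries no photo references at all,
# i.e. there is nothing left to download for it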
if downloaded_img or have_image:
place['downloaded'] = True
else:
place['downloaded'] = False
return context
def add_more_place(context: list, new: list):
"""Append places to context
Args:
context: total nearby palce data
new: new data by next page tokens
Returns:
context: total nearby place that append
new to is's with out duplicated place
"""
place_exist = [place['place_id'] for place in context]
for place in new:
# Skip places that already exist in context
if place['place_id'] in place_exist:
continue
context.append(place)
return context
|
search/views.py
|
from django.shortcuts import render
from django.http import JsonResponse
import os
import json
import time
from .api import GoogleAPI
from threpose.settings import BASE_DIR
from src.caching.caching_gmap import APICaching
from decouple import config
gapi = GoogleAPI()
api_caching = APICaching()
PLACE_IMG_PATH = os.path.join(BASE_DIR, 'media', 'places_image')
# Place List page
def get_next_page_from_token(request): # pragma: no cover
"""Get places list data by next_page_token."""
# Check request
if request.method != 'POST':
return JsonResponse({"status": "INVALID METHOD"})
if 'token' not in request.POST:
return JsonResponse({"status": "INVALID PAYLOAD"})
# Get next page token from request
token = request.POST['token']
context = []
# Check next_page cache
if api_caching.get(f'{token[:30]}') is None:
for _ in range(6):
# Request the data up to 6 times; if the response is never OK
# after the retries are exhausted, an empty list is returned
data = json.loads(gapi.next_search_nearby(token))
if data['status'] == "OK":
context = restruct_nearby_place(data['results'])
break
time.sleep(0.2)
# write cache file
byte_context = json.dumps({"cache": context, "status": "OK"}, indent=3).encode()
api_caching.add(f'{token[:30]}', byte_context)
if len(context) > 0:
return JsonResponse({"places": context, "status": "OK"})
return JsonResponse({"places": context, "status": "NOT FOUND"})
else: # Have cache
# load cache
context = json.loads(api_caching.get(f'{token[:30]}'))
# check place images
context = check_downloaded_image(context['cache'])
return JsonResponse({"places": context, "status": "OK"})
def place_list(request, *args, **kwargs): # pragma: no cover
"""Place_list view for list place that nearby the user search input."""
data = request.GET # get lat and lng from url
# Our default search type
types = ['restaurant', 'zoo', 'tourist_attraction', 'museum', 'cafe', 'aquarium']
lat = data['lat']
lng = data['lng']
# Get place cache
if api_caching.get(f'{lat}{lng}searchresult'):
# data exists
data = json.loads(api_caching.get(f'{lat}{lng}searchresult'))
context = data['cache']
token = data['next_page_token']
else:
# data not exist
context, token = get_new_context(types, lat, lng)
context = check_downloaded_image(context)
# get all image file name in static/images/place_image
api_key = config('FRONTEND_API_KEY')
return render(request, "search/place_list.html", {'places': context, 'all_token': token, 'api_key': api_key})
# Helper function
def get_new_context(types: list, lat: int, lng: int) -> list: # pragma: no cover
"""Cache new data and return the new data file
Args:
types: place type
lat, lng: latitude and longitude of user search input for
Returns:
context: places nearby data
token: next page token
"""
token = {}
# This create for keeping data from search nearby
tempo_context = []
for type in types:
data = json.loads(gapi.search_nearby(lat, lng, type))
if 'next_page_token' in data:
token[type] = data['next_page_token']
places = data['results']
restructed_places = restruct_nearby_place(places)
tempo_context = add_more_place(tempo_context, restructed_places)
# Caching places nearby
cache = {'cache': tempo_context, 'next_page_token': token}
api_caching.add(f'{lat}{lng}searchresult', json.dumps(cache, indent=3).encode())
# Load data from cache
context = json.loads(api_caching.get(f'{lat}{lng}searchresult'))['cache']
return context, token
def restruct_nearby_place(places: list) -> list:
"""Process data for frontend
Args:
places: A place nearby data from google map api.
Returns:
context: A place data that place-list page needed.
Data struct:
[
{
# Essential key
'place_name': <name>,
'place_id': <place_id>,
'photo_ref': [<photo_ref],
'types': [],
# other...
}
. . .
]
"""
context = []
for place in places:
init_place = {
'place_name': None,
'place_id': None,
'photo_ref': [],
'types': [],
}
if 'photos' in place:
# Place have an image
photo_ref = place['photos'][0]['photo_reference']
init_place['photo_ref'].append(photo_ref)
else:
# Place don't have an image
continue
init_place['place_name'] = place['name']
init_place['place_id'] = place['place_id']
init_place['types'] = place['types']
context.append(init_place)
return context
def check_downloaded_image(context: list) -> list:
"""Check that image from static/images/place_image that is ready for frontend to display or not
Args:
context: place nearby data
Returns:
context: place nearby data with telling the image of each place were downloaded or not
"""
# Check places_image dir that is exists
if os.path.exists(PLACE_IMG_PATH):
# Get image file name from static/images/places_image
all_img_file = [f for f in os.listdir(PLACE_IMG_PATH)
if os.path.isfile(os.path.join(PLACE_IMG_PATH, f))]
for place in context:
# If place that have photo_ref imply that place have an images
if 'photo_ref' in place:
place_id = place['place_id']
downloaded_img = f'{place_id}photo.jpeg' in all_img_file
have_image = len(place['photo_ref']) == 0
if downloaded_img or have_image:
place['downloaded'] = True
else:
place['downloaded'] = False
return context
def add_more_place(context: list, new: list):
"""Append places to context
Args:
context: total nearby palce data
new: new data by next page tokens
Returns:
context: total nearby place that append
new to is's with out duplicated place
"""
place_exist = [place['place_id'] for place in context]
for place in new:
# Check that place is exists or not
if place['place_id'] in place_exist:
continue
context.append(place)
return context
| 0.668123 | 0.118181 |
from __future__ import division, unicode_literals, print_function
import os
import sys
import hashlib
import time
import logging
import sqlite3
import numpy as np
from io import BytesIO
from ..debugging import DebugPlot
try:
import tqdm
except ImportError:
tqdm = None
logger = logging.getLogger(__name__)
def silent_progress_bar(iterable):
"""
Dummy function, just returns an iterator.
    :param iterable: the iterable to turn into an iterator
:type iterable: iterable
:return: iterable
:rtype: iterable
>>> next(silent_progress_bar([1, 2, 3]))
1
"""
return iter(iterable)
def fancy_progress_bar(iterable):
"""
    Returns an iterator which will show progress as well.
Will either use the tqdm module when available, or a simpler implementation.
:param iterable: the iterable to progress-ify
:type iterable: iterable
:rtype: iterable
:return: progress-ified iterable
"""
if tqdm:
# weird bug: if the threading magic in tqdm is active, multiprocessing in molyso gets stuck!
# should be investigated further, but for now, let us just disable it ...
tqdm.tqdm.monitor_interval = 0
for item in tqdm.tqdm(iterable):
yield item
else:
times = np.zeros(len(iterable), dtype=float)
for n, i in enumerate(iterable):
start_time = time.time()
yield i
stop_time = time.time()
times[n] = stop_time - start_time
eta = " ETA %.2fs" % float(np.mean(times[:n + 1]) * (len(iterable) - (n + 1)))
logger.info("processed %d/%d [took %.3fs%s]" % (n + 1, len(iterable), times[n], eta))
def iter_time(iterable):
"""
Will print the total time elapsed during iteration of ``iterable`` afterwards.
:param iterable: iterable
:type iterable: iterable
:rtype: iterable
:return: iterable
"""
start_time = time.time()
for n in iterable:
yield n
stop_time = time.time()
logger.info("whole step took %.3fs" % (stop_time - start_time,))
_fancy_progress_bar = fancy_progress_bar
def fancy_progress_bar(iterable):
    """
    Progress bar which additionally logs the total elapsed time once iteration finishes.
    :param iterable: the iterable to progress-ify
    :return: progress-ified iterable
    """
return iter_time(_fancy_progress_bar(iterable))
def dummy_progress_indicator():
"""
:return:
"""
return iter(int, 1)
def ignorant_next(iterable):
"""
Will try to iterate to the next value, or return None if none is available.
:param iterable:
:return:
"""
try:
return next(iterable)
except StopIteration:
return None
class QuickTableDumper(object):
"""
:param recipient:
"""
delimiter = '\t'
line_end = '\n'
precision = 8
def __init__(self, recipient=None):
if recipient is None:
recipient = sys.stdout
self.recipient = recipient
self.headers = []
def write_list(self, the_list):
"""
:param the_list:
"""
self.recipient.write(self.delimiter.join(map(self.stringify, the_list)) + self.line_end)
def add(self, row):
"""
:param row:
"""
if len(self.headers) == 0:
self.headers = list(sorted(row.keys()))
self.write_list(self.headers)
self.write_list(row[k] for k in self.headers)
def stringify(self, obj):
"""
:param obj:
:return:
"""
if type(obj) in (float, np.float32, np.float64) and self.precision:
return str(round(obj, self.precision))
else:
return str(obj)
try:
# noinspection PyUnresolvedReferences
import cPickle
pickle = cPickle
except ImportError:
import pickle
try:
import _thread
except ImportError:
import thread as _thread
if os.name != 'nt':
def correct_windows_signal_handlers():
"""
Dummy for non-windows os.
"""
pass
else:
def correct_windows_signal_handlers():
"""
Corrects Windows signal handling, otherwise multiprocessing solutions will not correctly
exit if Ctrl-C is used to interrupt them.
:return:
"""
os.environ['PATH'] += os.path.pathsep + os.path.dirname(os.path.abspath(sys.executable))
try:
# noinspection PyUnresolvedReferences
import win32api
def _handler(_, hook=_thread.interrupt_main):
hook()
return 1
win32api.SetConsoleCtrlHandler(_handler, 1)
except ImportError:
            logger.warning("Running on Windows, but module 'win32api' could not be imported to fix signal handler.\n" +
                           "Ctrl-C might break the program ... " +
                           "Fix: Install the module!")
def debug_init():
"""
    Initializes debug mode; as of now this means that DebugPlot is set to active (it will produce a debug.pdf).
"""
DebugPlot.force_active = True
np.set_printoptions(threshold=sys.maxsize)
def parse_range(s, maximum=0):
"""
:param s:
:param maximum:
:return:
"""
maximum -= 1
splits = s.replace(' ', '').replace(';', ',').split(',')
ranges = []
remove = []
not_values = False
for frag in splits:
if frag[0] == '~':
not_values = not not_values
frag = frag[1:]
if '-' in frag:
f, t = frag.split('-')
interval = 1
if '%' in t:
t, _interval = t.split('%')
interval = int(_interval)
if t == '':
t = maximum
f, t = int(f), int(t)
t = min(t, maximum)
parsed_fragment = range(f, t + 1, interval)
else:
parsed_fragment = [int(frag)]
if not_values:
remove += parsed_fragment
else:
ranges += parsed_fragment
return list(sorted(set(ranges) - set(remove)))
def prettify_numpy_array(arr, space_or_prefix):
"""
Returns a properly indented string representation of a numpy array.
:param arr:
:param space_or_prefix:
:return:
"""
six_spaces = ' ' * 6
prepared = repr(np.array(arr)).replace(')', '').replace('array(', six_spaces)
if isinstance(space_or_prefix, int):
return prepared.replace(six_spaces, ' ' * space_or_prefix)
else:
return space_or_prefix + prepared.replace(six_spaces, ' ' * len(space_or_prefix)).lstrip()
def bits_to_numpy_type(bits):
"""
Returns a numpy.dtype for one of the common image bit-depths:
8 for unsigned int, 16 for unsigned short, 32 for float
:param bits:
:return:
"""
    # unsupported bit depths intentionally raise a KeyError here
return {
8: np.uint8,
16: np.uint16,
32: np.float32
}[int(bits)]
class BaseCache(object):
"""
A caching class
"""
@staticmethod
def prepare_key(key):
"""
:param key:
:return:
"""
if isinstance(key, type('')):
return key
else:
return repr(key)
@staticmethod
def serialize(data):
"""
:param data:
:return:
"""
try:
bio = BytesIO()
pickle.dump(data, bio, protocol=pickle.HIGHEST_PROTOCOL)
try:
# noinspection PyUnresolvedReferences
pickled_data = bio.getbuffer()
except AttributeError:
pickled_data = bio.getvalue()
except ImportError:
pickled_data = pickle.dumps(data, protocol=pickle.HIGHEST_PROTOCOL)
return pickled_data
@staticmethod
def deserialize(data):
"""
:param data:
:return:
"""
assert data is not None
bio = BytesIO(data)
return pickle.load(bio)
def __init__(self, filename_to_be_hashed, ignore_cache='nothing', cache_token=None):
self.logger = logging.getLogger(__name__ + '.' + self.__class__.__name__)
self.filename_hash_source = filename_to_be_hashed
if cache_token is None:
self.cache_token = "%s.%s" % (
os.path.basename(filename_to_be_hashed).replace('.', '_').replace('?', '_').replace(',', '_'),
hashlib.sha1(str(os.path.abspath(filename_to_be_hashed).lower()).encode()).hexdigest()[:8])
else:
self.cache_token = cache_token
if ignore_cache == 'everything':
self.ignore_cache = True
elif ignore_cache == 'nothing':
self.ignore_cache = []
else:
self.ignore_cache = ignore_cache.split(',')
def contains(self, key):
"""
:param key:
:return:
"""
return False
def get(self, key):
"""
:param key:
:return:
"""
return ''
def set(self, key, value):
"""
:param key:
:param value:
:return:
"""
return
def __contains__(self, key):
if self.ignore_cache is True or key in self.ignore_cache:
return False
else:
try:
self.logger.debug("Checking whether '%s' exists", key)
return self.contains(self.prepare_key(key))
except Exception as e:
                self.logger.exception(
                    "While %s an Exception occurred (but continuing): %s",
                    repr(self.__contains__), repr(e)
                )
return False
def __getitem__(self, key):
try:
self.logger.debug("Getting data for '%s'", key)
return self.deserialize(self.get(self.prepare_key(key)))
except Exception as e:
self.logger.exception(
"While %s an Exception occurred (but continuing): %s. Note that this will yield undefined behavior.",
repr(self.__getitem__), repr(e)
)
# this is technically wrong ...
return None
def __setitem__(self, key, value):
if self.ignore_cache is True or key in self.ignore_cache:
return
else:
try:
self.logger.debug("Setting data for '%s'", key)
self.set(self.prepare_key(key), self.serialize(value))
except Exception as e:
self.logger.exception(
"While %s an Exception occurred (but continuing): %s",
repr(self.__setitem__), repr(e)
)
class FileCache(BaseCache):
"""
A caching class which stores the data in flat files.
"""
def build_cache_filename(self, suffix):
"""
:param suffix:
:return:
"""
return "%s.%s.cache" % (self.cache_token, suffix,)
def contains(self, key):
"""
:param key:
:return:
"""
return os.path.isfile(self.build_cache_filename(key))
def get(self, key):
"""
:param key:
:return:
"""
with open(self.build_cache_filename(key), 'rb') as fp:
return fp.read(os.path.getsize(self.build_cache_filename(key)))
def set(self, key, value):
"""
:param key:
:param value:
"""
with open(self.build_cache_filename(key), 'wb+') as fp:
fp.write(value)
Cache = FileCache
class Sqlite3Cache(BaseCache):
"""
A caching class which stores the data in a sqlite3 database.
"""
def contains(self, key):
"""
:param key:
:return:
"""
result = self.conn.execute('SELECT COUNT(*) FROM entries WHERE name = ?', (key,))
for row in result:
return row[0] == 1
return False
def get(self, key):
"""
:param key:
:return:
"""
result = self.conn.execute('SELECT value FROM entries WHERE name = ?', (key,))
for row in result:
return row[0]
def keys(self):
"""
:return:
"""
result = self.conn.execute('SELECT name FROM entries')
return [row[0] for row in result]
def set(self, key, value):
"""
:param key:
:param value:
"""
self.conn.execute('DELETE FROM entries WHERE name = ?', (key,))
self.conn.execute(
'INSERT INTO entries (name, value) VALUES (?, ?)',
(key, sqlite3.Binary(value),)
)
self.conn.commit()
def __init__(self, *args, **kwargs):
super(Sqlite3Cache, self).__init__(*args, **kwargs)
self.conn = None
if self.ignore_cache is not True:
self.conn = sqlite3.connect('%s.sq3.cache' % (self.cache_token, ))
self.conn.isolation_level = None
self.conn.execute('PRAGMA journal_mode = WAL')
self.conn.execute('PRAGMA synchronous = NORMAL')
self.conn.isolation_level = 'DEFERRED'
self.conn.execute('CREATE TABLE IF NOT EXISTS entries (name TEXT, value BLOB)')
self.conn.execute('CREATE UNIQUE INDEX IF NOT EXISTS entries_name ON entries (name)')
def __del__(self):
if self.conn:
self.conn.close()
class NotReallyATree(list):
"""
The class is a some-what duck-type compatible (it has a ``query`` method) dumb replacement
for (c)KDTrees. It can be used to find the nearest matching point to a query point.
(And does that by exhaustive search...)
"""
def __init__(self, iterable):
"""
:param iterable: input data
:type iterable: iterable
:return: the queryable object
:rtype: NotReallyAtree
"""
super(NotReallyATree, self).__init__(self)
for i in iterable:
self.append(i)
self.na = np.array(iterable)
def query(self, q): # w_numpy
"""
Finds the point which is nearest to ``q``.
Uses the Euclidean distance.
:param q: query point
:return: distance, index
:rtype: float, int
>>> t = NotReallyATree([[1.0, 1.0], [2.0, 2.0], [3.0, 3.0]])
>>> t.query([1.25, 1.25])
(0.3535533905932738, 0)
>>> t = NotReallyATree([[1.0, 1.0], [2.0, 2.0], [3.0, 3.0]])
>>> t.query([2.3535533905932737622, 2.3535533905932737622])
(0.5000000000000002, 1)
"""
distances = np.sqrt(np.sum(np.power(self.na - q, 2.0), 1))
pos = np.argmin(distances, 0)
return distances[pos], pos
|
molyso/generic/etc.py
|
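A short usage sketch for the caching classes and parse_range defined above; the input file name is hypothetical, and the expected parse_range result follows from the code as written.

cache = Sqlite3Cache('experiment_01.tif')  # FileCache / Cache work the same way
if 'channels' not in cache:                # BaseCache.__contains__
    cache['channels'] = {'phase': 0, 'fluorescence': 1}  # pickled by BaseCache.serialize
channels = cache['channels']               # unpickled by BaseCache.deserialize

# parse_range: keep 1-5 and 8, then the '~3' fragment removes 3 again
assert parse_range('1-5,8,~3', maximum=100) == [1, 2, 4, 5, 8]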
def countComponents1(n: int, edges: list[list[int]]) -> int:
"""
    quick find based implementation
Args:
n (int): number of nodes
edges (list[list[int]]): list of edges
Returns:
int: number of connected components
"""
connections = [n for n in range(n)]
for edge in edges:
left, right = edge[0], edge[1]
if connections[left] != connections[right]:
old_group = connections[right]
connections[right] = connections[left]
for index in range(n):
if connections[index] == old_group:
connections[index] = connections[left]
return len(set(connections))
def countComponents2(n: int, edges: list[list[int]]) -> int:
"""
quick union based implementation
Args:
n (int): number of nodes
edges (list[list[int]]): list of edges
Returns:
int: number of connected components
"""
connections = [n for n in range(n)]
def get_parent(node):
while node != connections[node]:
node = connections[node]
return node
for edge in edges:
u, v = get_parent(edge[0]), get_parent(edge[1])
connections[u] = connections[v]
components = set([get_parent(n) for n in range(n)])
return len(components)
def countComponents3(n: int, edges: list[list[int]]) -> int:
"""
union by rank based implementation
Args:
        n (int): number of nodes
        edges (list[list[int]]): list of edges
Returns:
int: number of connected components
"""
connections = [idx for idx in range(n)]
rank = [1] * n
def get_group(node : int):
while node != connections[node]:
node = connections[node]
return node
def connect(u, v) -> None:
gu, gv = get_group(u), get_group(v)
if gu != gv:
            # union by rank: attach the root of the shallower tree under the deeper one
            if rank[gu] > rank[gv]:
                connections[gv] = gu
            elif rank[gv] > rank[gu]:
                connections[gu] = gv
            else:
                connections[gv] = gu
                rank[gu] = rank[gu] + 1
for edge in edges:
u, v = edge[0], edge[1]
connect(u, v)
components = set()
for node in range(n):
components.add(get_group(node))
return len(components)
def countComponents4(n: int, edges: list[list[int]]) -> int:
connections = [idx for idx in range(n)]
rank = [1] * n
def get_parent(node : int) -> int:
if node == connections[node]:
return node
connections[node] = get_parent(connections[node])
return connections[node]
def connect(u, v) -> None:
pu, pv = get_parent(u), get_parent(v)
if pu != pv:
if rank[pu] > rank[pv]:
connections[pv] = pu
elif rank[pu] < rank[pv]:
connections[pu] = pv
else:
connections[pv] = pu
rank[pv] = rank[pv] + 1
for edge in edges:
u, v = edge[0], edge[1]
connect(u, v)
components = set()
for node in range(n):
components.add(get_parent(node))
return len(components)
def countComponents5(n: int, edges: list[list[int]]) -> int:
from collections import defaultdict
from queue import Queue
graph = defaultdict(list)
for edge in edges:
u, v = edge[0], edge[1]
graph[u].append(v)
graph[v].append(u)
seen = set()
def bfs(node : int):
path, queue = [], Queue()
queue.put(node)
while not queue.empty():
v = queue.get()
if v not in seen:
path.append(v)
seen.add(v)
for u in graph[v]:
if u not in seen:
queue.put(u)
return path
components = []
for node in range(n):
if node not in seen:
path = bfs(node)
components.append(len(path))
return len(components)
def countComponents6(n: int, edges: list[list[int]]) -> int:
from collections import defaultdict
graph = defaultdict(list)
for edge in edges:
u, v = edge[0], edge[1]
graph[u].append(v)
graph[v].append(u)
seen = set()
def dfs(node : int):
path, stack = [], [node]
while stack:
v = stack.pop()
for u in graph[v]:
if u not in seen:
seen.add(u)
stack.append(u)
path.append(v)
seen.add(v)
return path
components = []
for node in range(n):
if node not in seen:
path = dfs(node)
components.append(len(path))
return len(components)
|
py/dcp/leetcode/graph/connected_components.py
|
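A quick sanity check for the six implementations above, using a made-up graph in which nodes 0-1-2 form one component and nodes 3-4 another.

n, edges = 5, [[0, 1], [1, 2], [3, 4]]
for count_components in (countComponents1, countComponents2, countComponents3,
                         countComponents4, countComponents5, countComponents6):
    assert count_components(n, edges) == 2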
from pydantic import BaseModel, IPvAnyAddress, Field, validator
from socialserver.constants import MAX_PIXEL_RATIO
from typing import Literal, Optional
class _ServerConfigNetwork(BaseModel):
host: IPvAnyAddress
# 1-65535 is the valid TCP port range, hence the limit.
port: int = Field(..., ge=1, le=65535)
class _ServerConfigMisc(BaseModel):
enable_landing_page: bool
class _ServerConfigDatabase(BaseModel):
# these are optional depending on the connector,
# handled by the connection_validation validator below.
filename: Optional[str]
username: Optional[str]
password: Optional[str]
database_name: Optional[str]
host: Optional[str]
connector: Literal["sqlite", "postgres"]
@validator("connector")
def connector_validation(cls, value, values):
if value == "sqlite":
filename = values.get("filename")
assert filename not in [
None,
"",
], "filename required when using the sqlite connector"
if value == "postgres":
required_values = ["username", "password", "database_name", "host"]
for reqd_value in required_values:
assert (
values.get(reqd_value) is not None
                ), "username, password, database_name, host required when using the postgres connector"
return value
class _ServerConfigMediaImages(BaseModel):
quality: int = Field(..., ge=1, le=100)
post_quality: int = Field(..., ge=1, le=100)
storage_dir: str
# max size cannot be negative. god knows what would happen if it was.
# probably not much. but you definitely wouldn't be uploading any images.
max_image_request_size_mb: float = Field(..., ge=0)
class _ServerConfigMediaVideos(BaseModel):
storage_dir: str
class _ServerConfigMedia(BaseModel):
images: _ServerConfigMediaImages
videos: _ServerConfigMediaVideos
class _ServerConfigAuthRegistration(BaseModel):
enabled: bool
approval_required: bool
auto_approve_when_approval_disabled: bool
class _ServerConfigAuthTotp(BaseModel):
replay_prevention_enabled: bool
issuer: str
# it makes no sense for a time in the future to be < 0,
# and would just cause issues.
unconfirmed_expiry_time: int = Field(..., ge=0)
class _ServerConfigAuth(BaseModel):
registration: _ServerConfigAuthRegistration
totp: _ServerConfigAuthTotp
class _ServerConfigPosts(BaseModel):
silent_fail_on_double_report: bool
class _ServerConfigLegacyApiInterface(BaseModel):
enable: bool
image_pixel_ratio: int = Field(..., ge=0, le=MAX_PIXEL_RATIO)
signup_enabled: bool
deliver_full_post_images: bool
report_legacy_version: bool
enable_less_secure_password_change: bool
provide_legacy_video_thumbnails: bool
provide_incompatible_video_thumbnail_text_overlay: bool
class ServerConfig(BaseModel):
network: _ServerConfigNetwork
misc: _ServerConfigMisc
database: _ServerConfigDatabase
media: _ServerConfigMedia
auth: _ServerConfigAuth
posts: _ServerConfigPosts
legacy_api_interface: _ServerConfigLegacyApiInterface
|
socialserver/resources/config/schema.py
|
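A minimal sketch of validating a configuration dict against the schema above; every value here is a made-up example rather than a recommended default.

example_config = {
    'network': {'host': '127.0.0.1', 'port': 8080},
    'misc': {'enable_landing_page': True},
    'database': {'connector': 'sqlite', 'filename': 'socialserver.db'},
    'media': {
        'images': {
            'quality': 80,
            'post_quality': 70,
            'storage_dir': 'media/images',
            'max_image_request_size_mb': 8.0,
        },
        'videos': {'storage_dir': 'media/videos'},
    },
    'auth': {
        'registration': {
            'enabled': True,
            'approval_required': False,
            'auto_approve_when_approval_disabled': True,
        },
        'totp': {
            'replay_prevention_enabled': True,
            'issuer': 'socialserver',
            'unconfirmed_expiry_time': 300,
        },
    },
    'posts': {'silent_fail_on_double_report': False},
    'legacy_api_interface': {
        'enable': False,
        'image_pixel_ratio': 1,   # must stay within 0..MAX_PIXEL_RATIO
        'signup_enabled': False,
        'deliver_full_post_images': False,
        'report_legacy_version': False,
        'enable_less_secure_password_change': False,
        'provide_legacy_video_thumbnails': False,
        'provide_incompatible_video_thumbnail_text_overlay': False,
    },
}
config = ServerConfig(**example_config)  # raises pydantic.ValidationError on bad values
assert config.network.port == 8080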
import os
import sys
import codecs
import re
import sem.importers
from sem.storage import Document, SEMCorpus, Annotation
from sem.exporters import BratExporter
lang2months = { # first element is empty so the index method returns values from 1 to 12
    u"fr": [u"", u"janvier", u"février", u"mars", u"avril", u"mai", u"juin", u"juillet", u"août", u"septembre", u"octobre", u"novembre", u"décembre"],
    u"en": [u"", u"january", u"february", u"march", u"april", u"may", u"june", u"july", u"august", u"september", u"october", u"november", u"december"]
}
def main(infilename,
outdir=u".", lang="fr"):
months = lang2months[lang]
try:
infilename = infilename.decode(sys.getfilesystemencoding())
except:
pass
numbers = re.compile("([0-9]+)", re.U + re.I)
corpus = SEMCorpus.from_xml(infilename)
link_filename = os.path.join(os.path.dirname(infilename), os.path.basename(infilename)[:7] + "-urls.txt")
with codecs.open(link_filename, "rU", "utf-8") as link_file:
l = [line.strip() for line in link_file if line.strip()]
documents = corpus.documents
documents.sort(key=lambda x: l.index(x.name))
try:
os.makedirs(outdir)
except:
pass
couples = {u"NER": u"NER"}
exporter = BratExporter()
prev_timestamp = u""
nth_timestamp = 1
with codecs.open(os.path.join(outdir, "%s" %(os.path.basename(link_filename))), "w", "utf-8") as O:
for nth, document in enumerate(documents, 1):
dates = [annotation for annotation in document.annotation("NER") if annotation.value == "Date"]
dates = [date for date in dates if len(document.content[date.lb : date.ub].strip().split()) == 3]
try:
parts = document.content[dates[0].lb : dates[0].ub].split()
parts[0] = int(numbers.findall(parts[0])[0])
except:
parts = document.content[dates[1].lb : dates[1].ub].split()
parts[0] = int(numbers.findall(parts[0])[0])
parts[1] = months.index(parts[1].lower())
parts[2] = int(parts[2])
timestamp = u"%04i_%02i_%02i" %(parts[2], parts[1], parts[0])
if timestamp == prev_timestamp:
nth_timestamp += 1
else:
nth_timestamp = 1
prev_timestamp = timestamp
docname = u"%s-%03i" %(timestamp, nth_timestamp)
O.write("%s\t%s\n" %(docname, document.name))
actual_outdir = os.path.join(outdir, str(parts[2]), u"%02i" %parts[1])
try:
os.makedirs(actual_outdir)
except:
pass
with codecs.open(os.path.join(actual_outdir, docname + ".txt"), "w", "utf-8") as txt:
txt.write(document.content)
with codecs.open(os.path.join(actual_outdir, docname + ".ann"), "w", "utf-8") as ann:
ann.write(exporter.document_to_unicode(document, couples))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser("...")
parser.add_argument("infilename",
help="the input file (SEM XML)")
parser.add_argument("-o", "--outdir", default=".",
help="the output directory (default: %(default)s)")
parser.add_argument("-l", "--lang", default="fr",
help="the language for months (default: %(default)s)")
args = parser.parse_args()
main(args.infilename,
         outdir=args.outdir, lang=args.lang)
sys.exit(0)
|
make_matches.py
|
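The script above is driven by its argparse block; a hypothetical call of the underlying main() function (file names are placeholders) looks like this. Note that main() derives the companion URL list from the first seven characters of the input base name.

# equivalent to: python make_matches.py 2019_03-corpus.sem.xml -o brat_output -l fr
# (a '2019_03-urls.txt' file is expected to sit next to the XML input)
main('2019_03-corpus.sem.xml', outdir='brat_output', lang='fr')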
from typing import Any, Dict, List, Optional, Tuple, Union
import numpy as np
import pandas as pd
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
tfb = tfp.bijectors
# K Local Level Prior Sample Size
# This is equal to the original [R package](https://github.com/google/CausalImpact/blob/07b60e1bf5c9c8d74e31ea602db39d7256a53b6f/R/impact_model.R#L25) # noqa: E501
kLocalLevelPriorSampleSize = 32
def process_model_args(model_args: Dict[str, Any]) -> Dict[str, Any]:
"""
Process general parameters related to how Causal Impact will be implemented, such
as standardization procedure or the addition of seasonal components to the model.
Args
----
model_args:
niter: int
How many iterations to run either for `hmc` or `vi` algorithms.
standardize: bool
If `True`, standardize data so result has zero mean and unitary standard
deviation.
prior_level_sd: float
Standard deviation that sets initial local level distribution. Default
value is 0.01 which means the linear regression is expected to explain
well the observed data. In cases where this is not expected, then it's also
possible to use the value 0.1. Still, this value will increase considerably
the extension of the random walk variance modeling data which can lead to
unreliable predictions (this might indicate that better covariates are
required).
fit_method: str
Which method to use when fitting the structural time series model. Can be
either `hmc` which stands for "Hamiltonian Monte Carlo" or "vi", i.e.,
"variational inference". The first is slower but more accurate whilst the
latter is the opposite. Defaults to `vi` which prioritizes performance.
nseasons: int
Specifies the duration of the period of the seasonal component; if input
data is specified in terms of days, then choosing nseasons=7 adds a weekly
seasonal effect.
season_duration: int
Specifies how many data points each value in season spans over. A good
example to understand this argument is to consider a hourly data as input.
For modeling a weekly season on this data, one can specify `nseasons=7` and
season_duration=24 which means each value that builds the season component
is repeated for 24 data points. Default value is 1 which means the season
component spans over just 1 point (this in practice doesn't change
anything). If this value is specified and bigger than 1 then `nseasons`
must be specified and bigger than 1 as well.
Returns
-------
Dict[str, Any]
standardize: bool
prior_level_sd: float
niter: int
fit_method: str
nseasons: int
season_duration: int
Raises
------
ValueError: if `standardize` is not of type `bool`.
if `prior_level_sd` is not `float`.
if `niter` is not `int`.
if `fit_method` not in {'hmc', 'vi'}.
if `nseasons` is not `int`.
if `season_duration` is not `int`.
if `season_duration` is bigger than 1 and `nseasons` is 1.
"""
standardize = model_args.get('standardize', True)
if not isinstance(standardize, bool):
raise ValueError('standardize argument must be of type bool.')
model_args['standardize'] = standardize
prior_level_sd = model_args.get('prior_level_sd', 0.01)
if not isinstance(prior_level_sd, float):
raise ValueError('prior_level_sd argument must be of type float.')
model_args['prior_level_sd'] = prior_level_sd
niter = model_args.get('niter', 1000)
if not isinstance(niter, int):
raise ValueError('niter argument must be of type int.')
model_args['niter'] = niter
fit_method = model_args.get('fit_method', 'vi')
if fit_method not in {'hmc', 'vi'}:
raise ValueError('fit_method can be either "hmc" or "vi".')
model_args['fit_method'] = fit_method
nseasons = model_args.get('nseasons', 1)
if not isinstance(nseasons, int):
raise ValueError('nseasons argument must be of type int.')
model_args['nseasons'] = nseasons
season_duration = model_args.get('season_duration', 1)
if not isinstance(season_duration, int):
raise ValueError('season_duration argument must be of type int.')
if nseasons <= 1 and season_duration > 1:
raise ValueError('nseasons must be bigger than 1 when season_duration is also '
'bigger than 1.')
model_args['season_duration'] = season_duration
return model_args
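# Illustrative sketch: process_model_args() fills in defaults for keys that are
# missing and validates the ones that are present, e.g.
#   process_model_args({'niter': 200, 'fit_method': 'hmc'})
# returns {'niter': 200, 'fit_method': 'hmc', 'standardize': True,
#          'prior_level_sd': 0.01, 'nseasons': 1, 'season_duration': 1},
# while process_model_args({'nseasons': 1, 'season_duration': 24}) raises a
# ValueError because season_duration > 1 requires nseasons > 1.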
def check_input_model(
model: tfp.sts.StructuralTimeSeries,
pre_data: pd.DataFrame,
post_data: pd.DataFrame
) -> None:
"""
    Checks whether the input model was properly built and is ready to be run. This function
is only invoked if the client sent a customized input model. Various assertions are
performed to guarantee it has been created appropriately, such as each component
should have `len(pre_data)` points for the argument `observed_time_series`. In case
the component is of type `tfp.sts.LinearRegression` or `SparseLinearRegression` then
the design matrix must have
`shape = (len(pre_data) + len(post_data), cols(pre_data) - 1)` which allows not only
to fit the model as well as to run the forecasts.
The model must be built with data of dtype=tf.float32 or np.float32 as otherwise an
error will be thrown when fitting the markov chains.
Args
----
model: StructuralTimeSeries
Can be either default `LocalLevel` or user specified generic model.
    pre_data: pd.DataFrame
    post_data: pd.DataFrame
Raises
------
ValueError: if model is not of appropriate type.
if model is built without appropriate observed time series data.
if model components don't have dtype=tf.float32 or np.float32
"""
def _check_component(component):
if isinstance(
component,
(tfp.sts.LinearRegression, tfp.sts.SparseLinearRegression)
):
covariates_shape = (len(pre_data) + len(post_data),
len(pre_data.columns) - 1)
if component.design_matrix.shape != covariates_shape:
raise ValueError(
'Customized Linear Regression Models must have total '
'points equal to pre_data and post_data points and '
'same number of covariates. Input design_matrix shape was '
f'{component.design_matrix.shape} and expected '
f'{(len(pre_data) + len(post_data), len(pre_data.columns) -1)} '
'instead.'
)
assert component.design_matrix.dtype == tf.float32
else:
for parameter in component.parameters:
assert parameter.prior.dtype == tf.float32
if not isinstance(model, tfp.sts.StructuralTimeSeries):
raise ValueError('Input model must be of type StructuralTimeSeries.')
if isinstance(model, tfp.sts.Sum):
for component in model.components:
_check_component(component)
else:
_check_component(model)
def build_inv_gamma_sd_prior(sigma_guess: float) -> tfd.Distribution:
"""
helper function to build the sd_prior distribution for standard deviation
modeling.
Args
----
sigma_guess: float
Initial guess of where the standard deviation of the parameter is located.
Returns
-------
tfd.Distribution
InverseGamma distribution modeling the standard deviation.
"""
sample_size = kLocalLevelPriorSampleSize
df = sample_size
a = np.float32(df / 2)
ss = sample_size * sigma_guess ** 2
b = np.float32(ss / 2)
return tfd.InverseGamma(a, b)
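# Worked example (illustrative): with kLocalLevelPriorSampleSize = 32 and
# sigma_guess = 0.01 this builds InverseGamma(a=32 / 2 = 16, b=32 * 0.01 ** 2 / 2 = 0.0016)
# on the *variance*, i.e. 1 / sigma ** 2 ~ Gamma(16, 0.0016); build_bijector() below then
# applies a square root so the transformed distribution models the standard deviation
# sigma itself rather than the variance.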
def build_bijector(dist: tfd.Distribution) -> tfd.Distribution:
"""
helper function for building final bijector given sd_prior. The bijector is
implemented through the `tfd.TransformedDistribution` class.
Args
----
dist: tfd.Distribution
Distribution to receive the transformation `G(X)`.
Returns
-------
new_dist: tfd.Distribution
New distribution given by `y = G(X)`.
"""
bijector = SquareRootBijector()
new_dist = tfd.TransformedDistribution(dist, bijector)
return new_dist
def build_default_model(
observed_time_series: pd.DataFrame,
pre_data: pd.DataFrame,
post_data: pd.DataFrame,
prior_level_sd: float,
nseasons: int,
season_duration: int
) -> tfp.sts.StructuralTimeSeries:
"""
When input model is `None` then proceeds to build a default `tfp.sts.LocalLevel`
model. If input data has covariates then also adds a `tfp.sts.SparseLinearRegression`
component.
The level_prior follows `1 / prior_level_sd ** 2 ~ Gamma(a, b)` according to
the original [BOOM](https://github.com/steve-the-bayesian/BOOM/blob/63f08a708153c8405b809405fa1ab5ed7193d648/Interfaces/python/R/R/bayes.py#L4:L12) package. # noqa: E501
This is achieved by using the InverseGamma(a, b) and a [bijector](https://tiao.io/post/building-probability-distributions-with-tensorflow-probability-bijector-api/) # noqa: E501
transformation for the square root operator.
As for the linear regressor, the `tfp.sts.SparseLinearRegression` operation is similar
to the spike-and-slab from the original R package; main difference is that it follows
instead a horseshoe distribution which tends to penalize less the meaningful weights
    in the shrinking process.[https://github.com/tensorflow/probability/blob/v0.12.1/tensorflow_probability/python/sts/regression.py#L265-L523] # noqa: E501
Args
----
observed_time_series: pd.DataFrame
pre_data: pd.DataFrame
post_data: pd.DataFrame
prior_level_sd: float
Sets an initial estimation for the standard deviation 'sigma' of the local
level prior. The bigger this value is, the wider is expected to be the random
walk extension on forecasts.
nseasons: int
season_duration: int
Returns
-------
model: tfp.sts.Sum
A `tfp.sts.LocalLevel` default model with possible another
`tfp.sts.SparseLinearRegression` and `tfp.sts.Seasonal` components representing
covariates and seasonal patterns.
"""
components = []
# use `values` to avoid batching dims
obs_sd = observed_time_series.std(skipna=True, ddof=0).values[0]
sd_prior = build_inv_gamma_sd_prior(prior_level_sd)
sd_prior = build_bijector(sd_prior)
# This is an approximation to simulate the bsts package from R. It's expected that
# given a few data points the posterior will converge appropriately given this
# distribution, that's why it's divided by 2.
obs_prior = build_inv_gamma_sd_prior(obs_sd / 2)
obs_prior = build_bijector(obs_prior)
level_component = tfp.sts.LocalLevel(
level_scale_prior=sd_prior,
observed_time_series=observed_time_series
)
components.append(level_component)
# If it has more than 1 column then it has covariates X so add a linear regressor
# component.
if len(pre_data.columns) > 1:
# We need to concatenate both pre and post data as this will allow the linear
# regressor component to use the post data when running forecasts. As first
# column is supposed to have response variable `y` then we filter out just the
# remaining columns for the `X` value.
complete_data = pd.concat([pre_data, post_data]).astype(np.float32)
# Set NaN values to zero so to not break TFP linear regression
complete_data.fillna(0, inplace=True)
linear_component = tfp.sts.SparseLinearRegression(
design_matrix=complete_data.iloc[:, 1:]
)
components.append(linear_component)
if nseasons > 1:
seasonal_component = tfp.sts.Seasonal(
num_seasons=nseasons,
num_steps_per_season=season_duration,
observed_time_series=observed_time_series
)
components.append(seasonal_component)
# Model must be built with `tfp.sts.Sum` so to add the observed noise `epsilon`
# parameter.
model = tfp.sts.Sum(components, observed_time_series=observed_time_series,
observation_noise_scale_prior=obs_prior)
return model
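# Usage sketch (illustrative): for a pre-period DataFrame whose first column is the
# response `y`, a default model with a weekly season on daily data could be built as
#   model = build_default_model(pre_data.iloc[:, :1], pre_data, post_data,
#                               prior_level_sd=0.01, nseasons=7, season_duration=1)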
def fit_model(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
method: str = 'hmc'
) -> Tuple[Union[List[tf.Tensor], Dict[str, tf.Tensor]], Optional[Dict[str, Any]]]:
"""
Run the Markovian Monte Carlo fitting process for finding the posterior `P(z | y)`
where z represents the structural components of the input state space model. Two
main methods can be used, either `hmc` which stands for 'Hamiltonian Monte Carlo'
and `vi` standing for 'Variational Inference'. The first method is expected to be
more accurate while less performante whereas the second is the opposite, that is,
faster but less accurate.
Args
----
model: tfp.sts.StructuralTimeSeries
Structural time series model built to explain the observed data. It may
contain several components such as local level, seasons and so on.
observed_time_series: pd.DataFrame
Contains the pre-period response variable `y`.
method: str
Either 'hmc' or 'vi' which selects which fitting process to run.
Returns
-------
(samples, kernel_results): Tuple[Union[List[tf.Tensor], Dict[str, tf.Tensor]],
Dict[str, Any]]
Raises
------
ValueError: If input method is invalid.
"""
if method == 'hmc':
# this method does not need to be wrapped in a `tf.function` context as the
# internal sampling method already is:
# https://github.com/tensorflow/probability/blob/v0.11.1/tensorflow_probability/python/sts/fitting.py#L422 # noqa: E501
# https://github.com/tensorflow/probability/issues/348
samples, kernel_results = tfp.sts.fit_with_hmc(
model=model,
observed_time_series=observed_time_series,
)
return samples, kernel_results
elif method == 'vi':
optimizer = tf.optimizers.Adam(learning_rate=0.1)
variational_steps = 200 # Hardcoded for now
variational_posteriors = tfp.sts.build_factored_surrogate_posterior(model=model)
@tf.function()
def _run_vi(): # pragma: no cover
tfp.vi.fit_surrogate_posterior(
target_log_prob_fn=model.joint_log_prob(
observed_time_series=observed_time_series
),
surrogate_posterior=variational_posteriors,
optimizer=optimizer,
num_steps=variational_steps
)
            # Don't sample too much as the variational inference method is built aiming for
# performance first.
samples = variational_posteriors.sample(100)
return samples, None
return _run_vi()
else:
raise ValueError(
f'Input method "{method}" not valid. Choose between "hmc" or "vi".'
)
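# A minimal usage sketch (toy data and values below are hypothetical, and the exact
# wiring in the package may differ) showing how a default model is typically passed to
# `fit_model`, and how the `method` argument trades accuracy ('hmc') for speed ('vi').
def _example_fit_default_model():  # pragma: no cover
    pre_data = pd.DataFrame({'y': np.random.randn(100).astype(np.float32)})
    post_data = pd.DataFrame({'y': np.random.randn(10).astype(np.float32)})
    model = build_default_model(pre_data, pre_data, post_data, prior_level_sd=0.01,
                                nseasons=1, season_duration=1)
    samples, kernel_results = fit_model(model, pre_data, method='vi')
    return samples, kernel_results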
def build_one_step_dist(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
) -> tfd.Distribution: # pragma: no cover
"""
Builds one step distribution for pre-intervention data given samples from the
posterior `P(z | y)`.
Args
----
model: tfp.StructuralTimeSeries
observed_time_series: pd.DataFrame
Corresponds to the `y` value.
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
samples from the posterior for each state component in `model`.
Returns
-------
one_step_dist: tfd.Distribution
"""
return tfp.sts.one_step_predictive(
model=model,
observed_time_series=observed_time_series,
parameter_samples=parameter_samples
)
def build_posterior_dist(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]],
num_steps_forecast: int
) -> tfd.Distribution: # pragma: no cover
"""
Builds the distribution for post-intervention data given samples from the
posterior `P(z | y)`.
Args
----
model: tfp.StructuralTimeSeries
observed_time_series: pd.DataFrame
Corresponds to the `y` value.
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
samples from the posterior for each state component in `model`.
num_steps_forecast: int
How many time steps to forecast into the future. These will be compared against
the real value of `y` to extract the estimation of impact.
Returns
-------
posterior_dist: tfd.Distribution
"""
return tfp.sts.forecast(
model=model,
observed_time_series=observed_time_series,
parameter_samples=parameter_samples,
num_steps_forecast=num_steps_forecast
)
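# A minimal sketch (hypothetical helper, not part of the original API) of how the two
# wrappers above are typically consumed: the forecast distribution exposes `mean()` and
# `stddev()`, which downstream code compares against the observed post-period data.
def _example_forecast_summary(model, observed_time_series, parameter_samples):  # pragma: no cover
    posterior_dist = build_posterior_dist(model, observed_time_series, parameter_samples,
                                          num_steps_forecast=10)
    return posterior_dist.mean(), posterior_dist.stddev()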
class SquareRootBijector(tfb.Bijector):
"""
    Compute `Y = g(X) = X ** (1 / 2)`, which transforms variance into standard deviation.
Main reference for building this bijector is the original [PowerTransform](https://github.com/tensorflow/probability/blob/v0.11.1/tensorflow_probability/python/bijectors/power_transform.py) # noqa: E501
"""
def __init__(
self,
validate_args: bool = False,
name: str = 'square_root_bijector'
):
"""
Args
----
validate_args: bool
Indicates whether arguments should be checked for correctness.
name: str
Name given to ops managed by this object.
"""
# Without these `parameters` the code won't be compatible with future versions
# of tfp:
# https://github.com/tensorflow/probability/issues/1202
parameters = dict(locals())
with tf.name_scope(name) as name:
super().__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
parameters=parameters,
name=name)
def _forward(self, x: Union[float, np.array, tf.Tensor]) -> tf.Tensor:
"""
Implements the forward pass `G` as given by `Y = G(X)`. In this case, it's a
simple square root of X.
Args
----
        x: Union[float, np.array, tf.Tensor]
Variable `X` to receive the transformation.
Returns
-------
X: tf.Tensor
Square root of `x`.
"""
return tf.sqrt(x)
def _inverse(self, y: Union[float, np.array, tf.Tensor]) -> tf.Tensor:
"""
Implements G^-1(y).
Args
----
y: Union[float, np.array, tf.Tensor]
Values to be transformed back. In this case, they will be squared.
Returns
-------
y: tf.Tensor
Squared `y`.
"""
return tf.square(y)
def _inverse_log_det_jacobian(self, y: tf.Tensor) -> tf.Tensor:
"""
When transforming from `P(X)` to `P(Y)` it's necessary to compute the log of the
determinant of the Jacobian matrix for each correspondent function `G` which
accounts for the volumetric transformations on each domain.
The inverse log determinant is given by:
        `ln(|J(G^-1(Y))|) = ln(|J(Y ** 2)|) = ln(|2 * Y|) = ln(2 * Y)`
Args
----
y: tf.Tensor
Returns
-------
tf.Tensor
"""
return tf.math.log(2 * y)
def _forward_log_det_jacobian(self, x: tf.Tensor) -> tf.Tensor:
"""
Computes the volumetric change when moving forward from `P(X)` to `P(Y)`, given
by:
        `ln(|J(G(X))|) = ln(|J(sqrt(X))|) = ln(|(1 / 2) * X ** (-1 / 2)|) =
        (-1 / 2) * ln(4.0 * X)`
Args
----
x: tf.Tensor
Returns
-------
        tf.Tensor
"""
return -0.5 * tf.math.log(4.0 * x)
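# A quick numerical sanity check (illustrative only, not part of the original module):
# the bijector maps a variance-like value to a standard deviation, and the two
# log-det-Jacobian implementations above should be consistent (opposite signs once
# G and G^-1 are composed).
def _example_square_root_bijector_check():  # pragma: no cover
    bijector = SquareRootBijector()
    variance = tf.constant(4.0)
    sd = bijector.forward(variance)  # expected: 2.0
    recovered = bijector.inverse(sd)  # expected: 4.0
    fldj = bijector.forward_log_det_jacobian(variance, event_ndims=0)  # -0.5 * ln(16)
    ildj = bijector.inverse_log_det_jacobian(sd, event_ndims=0)  # ln(4)
    return sd, recovered, fldj, ildj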
|
causalimpact/model.py
|
from typing import Any, Dict, List, Optional, Tuple, Union
import numpy as np
import pandas as pd
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
tfb = tfp.bijectors
# K Local Level Prior Sample Size
# This is equal to the original [R package](https://github.com/google/CausalImpact/blob/07b60e1bf5c9c8d74e31ea602db39d7256a53b6f/R/impact_model.R#L25) # noqa: E501
kLocalLevelPriorSampleSize = 32
def process_model_args(model_args: Dict[str, Any]) -> Dict[str, Any]:
"""
Process general parameters related to how Causal Impact will be implemented, such
as standardization procedure or the addition of seasonal components to the model.
Args
----
model_args:
niter: int
How many iterations to run either for `hmc` or `vi` algorithms.
standardize: bool
If `True`, standardize data so result has zero mean and unitary standard
deviation.
prior_level_sd: float
Standard deviation that sets initial local level distribution. Default
value is 0.01 which means the linear regression is expected to explain
well the observed data. In cases where this is not expected, then it's also
            possible to use the value 0.1. Still, this value considerably widens the
            random walk used to model the data, which can lead to unreliable
            predictions (and might indicate that better covariates are required).
fit_method: str
Which method to use when fitting the structural time series model. Can be
either `hmc` which stands for "Hamiltonian Monte Carlo" or "vi", i.e.,
"variational inference". The first is slower but more accurate whilst the
latter is the opposite. Defaults to `vi` which prioritizes performance.
nseasons: int
Specifies the duration of the period of the seasonal component; if input
data is specified in terms of days, then choosing nseasons=7 adds a weekly
seasonal effect.
season_duration: int
Specifies how many data points each value in season spans over. A good
            example to understand this argument is to consider hourly data as input.
For modeling a weekly season on this data, one can specify `nseasons=7` and
season_duration=24 which means each value that builds the season component
is repeated for 24 data points. Default value is 1 which means the season
component spans over just 1 point (this in practice doesn't change
anything). If this value is specified and bigger than 1 then `nseasons`
must be specified and bigger than 1 as well.
Returns
-------
Dict[str, Any]
standardize: bool
prior_level_sd: float
niter: int
fit_method: str
nseasons: int
season_duration: int
Raises
------
ValueError: if `standardize` is not of type `bool`.
if `prior_level_sd` is not `float`.
if `niter` is not `int`.
if `fit_method` not in {'hmc', 'vi'}.
if `nseasons` is not `int`.
if `season_duration` is not `int`.
if `season_duration` is bigger than 1 and `nseasons` is 1.
"""
standardize = model_args.get('standardize', True)
if not isinstance(standardize, bool):
raise ValueError('standardize argument must be of type bool.')
model_args['standardize'] = standardize
prior_level_sd = model_args.get('prior_level_sd', 0.01)
if not isinstance(prior_level_sd, float):
raise ValueError('prior_level_sd argument must be of type float.')
model_args['prior_level_sd'] = prior_level_sd
niter = model_args.get('niter', 1000)
if not isinstance(niter, int):
raise ValueError('niter argument must be of type int.')
model_args['niter'] = niter
fit_method = model_args.get('fit_method', 'vi')
if fit_method not in {'hmc', 'vi'}:
raise ValueError('fit_method can be either "hmc" or "vi".')
model_args['fit_method'] = fit_method
nseasons = model_args.get('nseasons', 1)
if not isinstance(nseasons, int):
raise ValueError('nseasons argument must be of type int.')
model_args['nseasons'] = nseasons
season_duration = model_args.get('season_duration', 1)
if not isinstance(season_duration, int):
raise ValueError('season_duration argument must be of type int.')
if nseasons <= 1 and season_duration > 1:
raise ValueError('nseasons must be bigger than 1 when season_duration is also '
'bigger than 1.')
model_args['season_duration'] = season_duration
return model_args
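# A minimal usage sketch (hypothetical values): `process_model_args` fills in defaults
# for anything not supplied, so a partial dict is a valid starting point.
def _example_process_model_args():  # pragma: no cover
    args = process_model_args({'nseasons': 7, 'season_duration': 24})
    # `args` now also carries the defaults, i.e. standardize=True, prior_level_sd=0.01,
    # niter=1000 and fit_method='vi', alongside the explicit seasonal settings.
    return args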
def check_input_model(
model: tfp.sts.StructuralTimeSeries,
pre_data: pd.DataFrame,
post_data: pd.DataFrame
) -> None:
"""
    Checks whether the input model was properly built and is ready to be run. This function
is only invoked if the client sent a customized input model. Various assertions are
performed to guarantee it has been created appropriately, such as each component
should have `len(pre_data)` points for the argument `observed_time_series`. In case
the component is of type `tfp.sts.LinearRegression` or `SparseLinearRegression` then
the design matrix must have
`shape = (len(pre_data) + len(post_data), cols(pre_data) - 1)` which allows not only
to fit the model as well as to run the forecasts.
The model must be built with data of dtype=tf.float32 or np.float32 as otherwise an
error will be thrown when fitting the markov chains.
Args
----
model: StructuralTimeSeries
Can be either default `LocalLevel` or user specified generic model.
    pre_data: pd.DataFrame
    post_data: pd.DataFrame
Raises
------
ValueError: if model is not of appropriate type.
if model is built without appropriate observed time series data.
if model components don't have dtype=tf.float32 or np.float32
"""
def _check_component(component):
if isinstance(
component,
(tfp.sts.LinearRegression, tfp.sts.SparseLinearRegression)
):
covariates_shape = (len(pre_data) + len(post_data),
len(pre_data.columns) - 1)
if component.design_matrix.shape != covariates_shape:
raise ValueError(
'Customized Linear Regression Models must have total '
'points equal to pre_data and post_data points and '
'same number of covariates. Input design_matrix shape was '
f'{component.design_matrix.shape} and expected '
f'{(len(pre_data) + len(post_data), len(pre_data.columns) -1)} '
'instead.'
)
assert component.design_matrix.dtype == tf.float32
else:
for parameter in component.parameters:
assert parameter.prior.dtype == tf.float32
if not isinstance(model, tfp.sts.StructuralTimeSeries):
raise ValueError('Input model must be of type StructuralTimeSeries.')
if isinstance(model, tfp.sts.Sum):
for component in model.components:
_check_component(component)
else:
_check_component(model)
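# A minimal sketch (hypothetical toy frames) of the shape and dtype expectations encoded
# above for a customized linear-regression component: the design matrix must span both
# periods and exclude the response column.
def _example_custom_model_check():  # pragma: no cover
    pre = pd.DataFrame(np.random.randn(100, 2).astype(np.float32), columns=['y', 'x1'])
    post = pd.DataFrame(np.random.randn(10, 2).astype(np.float32), columns=['y', 'x1'])
    design_matrix = pd.concat([pre, post]).iloc[:, 1:]  # shape (110, 1), float32
    custom_model = tfp.sts.LinearRegression(design_matrix=design_matrix)
    check_input_model(custom_model, pre, post)  # passes silently when valid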
def build_inv_gamma_sd_prior(sigma_guess: float) -> tfd.Distribution:
"""
helper function to build the sd_prior distribution for standard deviation
modeling.
Args
----
sigma_guess: float
Initial guess of where the standard deviation of the parameter is located.
Returns
-------
tfd.Distribution
InverseGamma distribution modeling the standard deviation.
"""
sample_size = kLocalLevelPriorSampleSize
df = sample_size
a = np.float32(df / 2)
ss = sample_size * sigma_guess ** 2
b = np.float32(ss / 2)
return tfd.InverseGamma(a, b)
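# A worked example (illustrative numbers only): with the fixed prior sample size of 32,
# a guess of sigma = 0.01 yields a = 32 / 2 = 16 and b = 32 * 0.01 ** 2 / 2 = 0.0016,
# i.e. InverseGamma(concentration=16.0, scale=0.0016).
def _example_inv_gamma_sd_prior():  # pragma: no cover
    prior = build_inv_gamma_sd_prior(0.01)
    return prior.concentration, prior.scale  # expected: (16.0, 0.0016)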
def build_bijector(dist: tfd.Distribution) -> tfd.Distribution:
"""
helper function for building final bijector given sd_prior. The bijector is
implemented through the `tfd.TransformedDistribution` class.
Args
----
dist: tfd.Distribution
Distribution to receive the transformation `G(X)`.
Returns
-------
new_dist: tfd.Distribution
New distribution given by `y = G(X)`.
"""
bijector = SquareRootBijector()
new_dist = tfd.TransformedDistribution(dist, bijector)
return new_dist
def build_default_model(
observed_time_series: pd.DataFrame,
pre_data: pd.DataFrame,
post_data: pd.DataFrame,
prior_level_sd: float,
nseasons: int,
season_duration: int
) -> tfp.sts.StructuralTimeSeries:
"""
When input model is `None` then proceeds to build a default `tfp.sts.LocalLevel`
model. If input data has covariates then also adds a `tfp.sts.SparseLinearRegression`
component.
The level_prior follows `1 / prior_level_sd ** 2 ~ Gamma(a, b)` according to
the original [BOOM](https://github.com/steve-the-bayesian/BOOM/blob/63f08a708153c8405b809405fa1ab5ed7193d648/Interfaces/python/R/R/bayes.py#L4:L12) package. # noqa: E501
This is achieved by using the InverseGamma(a, b) and a [bijector](https://tiao.io/post/building-probability-distributions-with-tensorflow-probability-bijector-api/) # noqa: E501
transformation for the square root operator.
    As for the linear regressor, the `tfp.sts.SparseLinearRegression` component is
    similar to the spike-and-slab prior from the original R package; the main difference
    is that it uses a horseshoe prior instead, which tends to penalize meaningful weights
    less in the shrinking process.[https://github.com/tensorflow/probability/blob/v0.12.1/tensorflow_probability/python/sts/regression.py#L265-L523] # noqa: E501
Args
----
observed_time_series: pd.DataFrame
pre_data: pd.DataFrame
post_data: pd.DataFrame
prior_level_sd: float
Sets an initial estimation for the standard deviation 'sigma' of the local
        level prior. The bigger this value is, the wider the random walk extension on
        forecasts is expected to be.
nseasons: int
season_duration: int
Returns
-------
model: tfp.sts.Sum
        A default `tfp.sts.LocalLevel` model, possibly with additional
`tfp.sts.SparseLinearRegression` and `tfp.sts.Seasonal` components representing
covariates and seasonal patterns.
"""
components = []
# use `values` to avoid batching dims
obs_sd = observed_time_series.std(skipna=True, ddof=0).values[0]
sd_prior = build_inv_gamma_sd_prior(prior_level_sd)
sd_prior = build_bijector(sd_prior)
# This is an approximation to simulate the bsts package from R. It's expected that
# given a few data points the posterior will converge appropriately given this
# distribution, that's why it's divided by 2.
obs_prior = build_inv_gamma_sd_prior(obs_sd / 2)
obs_prior = build_bijector(obs_prior)
level_component = tfp.sts.LocalLevel(
level_scale_prior=sd_prior,
observed_time_series=observed_time_series
)
components.append(level_component)
# If it has more than 1 column then it has covariates X so add a linear regressor
# component.
if len(pre_data.columns) > 1:
# We need to concatenate both pre and post data as this will allow the linear
# regressor component to use the post data when running forecasts. As first
# column is supposed to have response variable `y` then we filter out just the
# remaining columns for the `X` value.
complete_data = pd.concat([pre_data, post_data]).astype(np.float32)
# Set NaN values to zero so to not break TFP linear regression
complete_data.fillna(0, inplace=True)
linear_component = tfp.sts.SparseLinearRegression(
design_matrix=complete_data.iloc[:, 1:]
)
components.append(linear_component)
if nseasons > 1:
seasonal_component = tfp.sts.Seasonal(
num_seasons=nseasons,
num_steps_per_season=season_duration,
observed_time_series=observed_time_series
)
components.append(seasonal_component)
# Model must be built with `tfp.sts.Sum` so to add the observed noise `epsilon`
# parameter.
model = tfp.sts.Sum(components, observed_time_series=observed_time_series,
observation_noise_scale_prior=obs_prior)
return model
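# A minimal sketch (hypothetical toy frames): with one covariate column and a weekly
# season the returned `tfp.sts.Sum` carries all three components described above.
def _example_default_model_components():  # pragma: no cover
    pre = pd.DataFrame(np.random.randn(70, 2).astype(np.float32), columns=['y', 'x1'])
    post = pd.DataFrame(np.random.randn(14, 2).astype(np.float32), columns=['y', 'x1'])
    model = build_default_model(pre[['y']], pre, post, prior_level_sd=0.01,
                                nseasons=7, season_duration=1)
    # Expected component types: LocalLevel, SparseLinearRegression, Seasonal.
    return [type(component).__name__ for component in model.components]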
def fit_model(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
method: str = 'hmc'
) -> Tuple[Union[List[tf.Tensor], Dict[str, tf.Tensor]], Optional[Dict[str, Any]]]:
"""
    Run the fitting process (Markov chain Monte Carlo or variational inference) for
    finding the posterior `P(z | y)`, where `z` represents the structural components
    of the input state space model. Two main methods can be used: either `hmc`, which
    stands for 'Hamiltonian Monte Carlo', or `vi`, which stands for 'Variational
    Inference'. The first method is expected to be more accurate but slower, whereas
    the second is the opposite, that is, faster but less accurate.
Args
----
model: tfp.sts.StructuralTimeSeries
Structural time series model built to explain the observed data. It may
contain several components such as local level, seasons and so on.
observed_time_series: pd.DataFrame
Contains the pre-period response variable `y`.
method: str
Either 'hmc' or 'vi' which selects which fitting process to run.
Returns
-------
(samples, kernel_results): Tuple[Union[List[tf.Tensor], Dict[str, tf.Tensor]],
Dict[str, Any]]
Raises
------
ValueError: If input method is invalid.
"""
if method == 'hmc':
# this method does not need to be wrapped in a `tf.function` context as the
# internal sampling method already is:
# https://github.com/tensorflow/probability/blob/v0.11.1/tensorflow_probability/python/sts/fitting.py#L422 # noqa: E501
# https://github.com/tensorflow/probability/issues/348
samples, kernel_results = tfp.sts.fit_with_hmc(
model=model,
observed_time_series=observed_time_series,
)
return samples, kernel_results
elif method == 'vi':
optimizer = tf.optimizers.Adam(learning_rate=0.1)
variational_steps = 200 # Hardcoded for now
variational_posteriors = tfp.sts.build_factored_surrogate_posterior(model=model)
@tf.function()
def _run_vi(): # pragma: no cover
tfp.vi.fit_surrogate_posterior(
target_log_prob_fn=model.joint_log_prob(
observed_time_series=observed_time_series
),
surrogate_posterior=variational_posteriors,
optimizer=optimizer,
num_steps=variational_steps
)
            # Don't sample too much as the variational inference method is built aiming for
# performance first.
samples = variational_posteriors.sample(100)
return samples, None
return _run_vi()
else:
raise ValueError(
f'Input method "{method}" not valid. Choose between "hmc" or "vi".'
)
def build_one_step_dist(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
) -> tfd.Distribution: # pragma: no cover
"""
Builds one step distribution for pre-intervention data given samples from the
posterior `P(z | y)`.
Args
----
model: tfp.StructuralTimeSeries
observed_time_series: pd.DataFrame
Corresponds to the `y` value.
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
samples from the posterior for each state component in `model`.
Returns
-------
one_step_dist: tfd.Distribution
"""
return tfp.sts.one_step_predictive(
model=model,
observed_time_series=observed_time_series,
parameter_samples=parameter_samples
)
def build_posterior_dist(
model: tfp.sts.StructuralTimeSeries,
observed_time_series: pd.DataFrame,
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]],
num_steps_forecast: int
) -> tfd.Distribution: # pragma: no cover
"""
Builds the distribution for post-intervention data given samples from the
posterior `P(z | y)`.
Args
----
model: tfp.StructuralTimeSeries
observed_time_series: pd.DataFrame
Corresponds to the `y` value.
parameter_samples: Union[List[tfd.Distribution], Dict[str, tfd.Distribution]]
samples from the posterior for each state component in `model`.
num_steps_forecast: int
How many time steps to forecast into the future. These will be compared against
the real value of `y` to extract the estimation of impact.
Returns
-------
posterior_dist: tfd.Distribution
"""
return tfp.sts.forecast(
model=model,
observed_time_series=observed_time_series,
parameter_samples=parameter_samples,
num_steps_forecast=num_steps_forecast
)
class SquareRootBijector(tfb.Bijector):
"""
    Compute `Y = g(X) = X ** (1 / 2)`, which transforms variance into standard deviation.
Main reference for building this bijector is the original [PowerTransform](https://github.com/tensorflow/probability/blob/v0.11.1/tensorflow_probability/python/bijectors/power_transform.py) # noqa: E501
"""
def __init__(
self,
validate_args: bool = False,
name: str = 'square_root_bijector'
):
"""
Args
----
validate_args: bool
Indicates whether arguments should be checked for correctness.
name: str
Name given to ops managed by this object.
"""
# Without these `parameters` the code won't be compatible with future versions
# of tfp:
# https://github.com/tensorflow/probability/issues/1202
parameters = dict(locals())
with tf.name_scope(name) as name:
super().__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
parameters=parameters,
name=name)
def _forward(self, x: Union[float, np.array, tf.Tensor]) -> tf.Tensor:
"""
Implements the forward pass `G` as given by `Y = G(X)`. In this case, it's a
simple square root of X.
Args
----
        x: Union[float, np.array, tf.Tensor]
Variable `X` to receive the transformation.
Returns
-------
X: tf.Tensor
Square root of `x`.
"""
return tf.sqrt(x)
def _inverse(self, y: Union[float, np.array, tf.Tensor]) -> tf.Tensor:
"""
Implements G^-1(y).
Args
----
y: Union[float, np.array, tf.Tensor]
Values to be transformed back. In this case, they will be squared.
Returns
-------
y: tf.Tensor
Squared `y`.
"""
return tf.square(y)
def _inverse_log_det_jacobian(self, y: tf.Tensor) -> tf.Tensor:
"""
When transforming from `P(X)` to `P(Y)` it's necessary to compute the log of the
determinant of the Jacobian matrix for each correspondent function `G` which
accounts for the volumetric transformations on each domain.
The inverse log determinant is given by:
        `ln(|J(G^-1(Y))|) = ln(|J(Y ** 2)|) = ln(|2 * Y|) = ln(2 * Y)`
Args
----
y: tf.Tensor
Returns
-------
tf.Tensor
"""
return tf.math.log(2 * y)
def _forward_log_det_jacobian(self, x: tf.Tensor) -> tf.Tensor:
"""
Computes the volumetric change when moving forward from `P(X)` to `P(Y)`, given
by:
        `ln(|J(G(X))|) = ln(|J(sqrt(X))|) = ln(|(1 / 2) * X ** (-1 / 2)|) =
        (-1 / 2) * ln(4.0 * X)`
Args
----
x: tf.Tensor
Returns
-------
        tf.Tensor
"""
return -0.5 * tf.math.log(4.0 * x)
| 0.959126 | 0.604749 |
import backbone.support.configurations_variables as confv
import backbone.support.data_loading as dl
import backbone.support.data_analysis as da
import backbone.support.data_cleaning as dc
import backbone.support.configuration_classes as confc
import backbone.support.saving_loading as sl
import backbone.support.plots_and_charts as pc
import backbone.support.build_features as bf
import numpy as np
import backbone.support.models as mdl
from sklearn.utils.class_weight import compute_class_weight
from tensorflow.keras.callbacks import TensorBoard
import time
import backbone.support.directory_file_checking as dfc
import os
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
import tensorflow as tf
print("\t===========================================================================================\n"
"\t\tMain program started for MAIN-DATABASE:{database}, GENDER-ISOLATION:{gender}\n"
"\t\t\t\u2234 Dataset Name: {name}\n"
"\t==========================================================================================="
.format(database=confv.database_emodb, gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# DATA LOADING SECTION
print("\n--------------------Started loading original data from the main database: {name}--------------------".format(name=confv.database_emodb))
data_info_emodb_df = dl.load_original_data(database=confv.database_emodb)
print("No. of sample audio files in {database} database: {length}\n".format(database=confv.database_emodb, length=len(data_info_emodb_df)))
print("Dataframe head of {database} database:".format(database=confv.database_emodb))
print(data_info_emodb_df.head())
print("\nDataframe tail of {database} database:".format(database=confv.database_emodb))
print(data_info_emodb_df.tail())
print("--------------------Finished loading original data from the main database: {name}--------------------".format(name=confv.database_emodb))
# RANDOM BASE AUDIO WAVE ANALYSIS SECTION
print("\n\n--------------------Started random base audio wave analysis for the main database: {name}--------------------".format(name=confv.database_emodb))
da.base_audio_wave_analysis(data_info_emodb_df.audio_fname[500], database=confv.database_emodb, status=confv.original)
print("--------------------Finished random base audio wave analysis for the main database: {name}--------------------".format(name=confv.database_emodb))
# DATAFRAME ADJUSTMENTS SECTION
print("\n\n--------------------Started dataframe adjustment for the main database: {name}--------------------".format(name=confv.database_emodb))
data_info_emodb_df_m, data_info_emodb_df_f = dc.data_adjustments(data_info_emodb_df)
print("--------------------Finished dataframe adjustment for the main database: {name}--------------------".format(name=confv.database_emodb))
# DATAFRAME SAVING
print("\n\n--------------------Started dataframe saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
emodb_m_df_obj = confc.DataFrame(database=confv.database_emodb, gender=confv.gender_male, df=data_info_emodb_df_m)
sl.save_dataframe(emodb_m_df_obj)
print("--------------------Finished dataframe saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# LOAD REQUIRED PICKLE
print("\n\n--------------------Started dataframe loading for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
emodb_m_df_obj = confc.DataFrame(database=confv.database_emodb, gender=confv.gender_male)
emodb_m_df_obj = sl.load_dataframe(emodb_m_df_obj)
data_info_emodb_df_m = emodb_m_df_obj.df
print(emodb_m_df_obj.database)
print(emodb_m_df_obj.gender)
print(len(data_info_emodb_df_m))
print(data_info_emodb_df_m.head())
print(data_info_emodb_df_m.tail())
print(emodb_m_df_obj.dataset)
print(emodb_m_df_obj.save_path)
print("--------------------Finished dataframe loading for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# ORIGINAL DATA DISTRIBUTION ANALYSIS SECTION
print("\n\n--------------------Started original data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
pc.emotion_distribution_bar_plot(df=data_info_emodb_df_m, title="{database} - {gender} Isolation - No. of Files".format(database=confv.database_emodb, gender=confv.gender_male))
pc.emotion_distribution_pie_plot(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, title="{database} - {gender} Isolation - Class/Data/Time Distribution".format(database=confv.database_emodb, gender=confv.gender_male))
print("--------------------Finished original data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# ORIGINAL DATA VISUAL ANALYSIS (signal, fft, fbank, mfcc) SECTION
print("\n\n--------------------Started original data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=False, resample=False)
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=True, resample=True)
print("--------------------Finished original data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# DATA CLEANING - DOWN SAMPLING AND NOISE FLOOR DETECTION
print("\n\n--------------------Started data cleaning for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
dc.data_cleaning(df=data_info_emodb_df_m, database=confv.database_emodb)
print("--------------------Finished data cleaning for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# DATA MINIMUM AUDIO LENGTH COMPLIANCE CHECK
print("\n\n--------------------Started data minimum audio compliance check for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
data_info_emodb_df_m = dc.check_and_adjust_df_for_minimum_audio_length_after_cleaning(df=data_info_emodb_df_m, database=confv.database_emodb, gender=confv.gender_male)
print("--------------------Finished data minimum audio compliance check for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# CLEANED DATA DISTRIBUTION ANALYSIS SECTION
print("\n\n--------------------Started cleaned data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
pc.emotion_distribution_bar_plot(df=data_info_emodb_df_m, title="{database} - {gender} Isolation - No. of Files".format(database=confv.database_emodb, gender=confv.gender_male))
pc.emotion_distribution_pie_plot(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.clean, gender=confv.gender_male, title="{database} - {gender} Isolation - Class/Data/Time Distribution".format(database=confv.database_emodb, gender=confv.gender_male))
print("--------------------Finished cleaned data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# CLEANED DATA VISUAL ANALYSIS (signal, fft, fbank, mfcc) SECTION
print("\n\n--------------------Started cleaned data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.clean, gender=confv.gender_male, envelope=False, resample=False)
# This is the same as,
# da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=True, resample=True)
# Since these cleaned data already have the envelope and resampling applied, setting them to False or True does not matter.
# (envelope and resample do not matter when the data is already clean)
print("--------------------Finished cleaned data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# Building Features
print("\n\n--------------------Started building features for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
classes = list(np.unique(data_info_emodb_df_m.stress_emotion))
mconf_emodb_m = confc.ModelConfig(database=confv.database_emodb, gender=confv.gender_male, mode=confv.ml_mode_convolutional, classes=classes)
print(mconf_emodb_m.database)
print(mconf_emodb_m.gender)
print(mconf_emodb_m.mode)
print(mconf_emodb_m.nfilt)
print(mconf_emodb_m.nfeat)
print(mconf_emodb_m.nfft)
print(mconf_emodb_m.step)
print(mconf_emodb_m.classes)
print(mconf_emodb_m.features_save_name)
print(mconf_emodb_m.model_config_save_name)
print(mconf_emodb_m.training_log_name)
print(mconf_emodb_m.model_save_name)
print(mconf_emodb_m.model_h5_save_name)
print(mconf_emodb_m.model_tflite_save_name)
print(mconf_emodb_m.feature_path)
print(mconf_emodb_m.model_config_path)
print(mconf_emodb_m.training_log_path)
print(mconf_emodb_m.model_path)
print(mconf_emodb_m.model_h5_path)
print(mconf_emodb_m.model_tflite_path)
rfpconf_emodb_m = confc.RandFeatParams(df=data_info_emodb_df_m, database=confv.database_emodb, gender=confv.gender_male)
X, y = bf.build_random_features(modelconfig=mconf_emodb_m, randfeatparams=rfpconf_emodb_m)
print("--------------------Finished building features for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# MODEL & TRAINING
print("\n\n--------------------Started model training for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
input_shape = (X.shape[1], X.shape[2], 1)
model = mdl.get_emodb_male_model(input_shape)
y_flat = np.argmax(y, axis=1)
class_weight = compute_class_weight(class_weight='balanced', classes=np.unique(y_flat), y=y_flat)
class_weight = {i: class_weight[i] for i in range(len(class_weight))}
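# Note: `compute_class_weight('balanced', ...)` returns one weight per class, inversely
# proportional to its frequency (n_samples / (n_classes * class_count)); for a
# hypothetical 80/20 split of 100 labels that is roughly [0.625, 2.5]. Logging the
# mapping makes the imbalance handling visible in the training output.
print("Computed class weights: {}".format(class_weight))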
NAME = "{database}-{gender}-{modeltype}-{spec}-{time}".format(database=confv.database_emodb, gender=confv.gender_male, modeltype=confv.ml_mode_convolutional, spec="1st", time=int(time.time()))
mdl_logs_pth = os.path.join(confv.base_store, confv.log_dir)
tensorboard = TensorBoard(log_dir=os.path.join(mdl_logs_pth, NAME))
dfc.check_dir_inside_saved_features_and_modelconfigs_and_models(parent=confv.saved_training_metrics_logs, database=confv.database_emodb, gender=confv.gender_male)
csv_logger = CSVLogger(mconf_emodb_m.training_log_path)
# earlyStopping = EarlyStopping(monitor='val_loss', patience=10, verbose=0, mode='min')
# mcp_save = ModelCheckpoint('.mdl_wts.hdf5', save_best_only=True, monitor='val_loss', mode='min')
# reduce_lr_loss = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=7, verbose=1, mode='min')
model.fit(X, y, epochs=35, batch_size=32, shuffle=True, class_weight=class_weight, validation_split=0.2, callbacks=[tensorboard, csv_logger])
print("--------------------Finished model training for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# MODEL SAVING
print("\n\n--------------------Started model saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
dfc.check_dir_inside_saved_features_and_modelconfigs_and_models(parent=confv.saved_models, database=confv.database_emodb, gender=confv.gender_male)
model.save(mconf_emodb_m.model_path)
model.save(mconf_emodb_m.model_h5_path)
# Convert the model & save in tflite
converter = tf.lite.TFLiteConverter.from_saved_model(mconf_emodb_m.model_path)
tflite_model = converter.convert()
with open(mconf_emodb_m.model_tflite_path, 'wb') as outfile:
outfile.write(tflite_model)
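# Optional sanity check (illustrative sketch; the path attribute is taken from the config
# object above): reload the converted TFLite model and confirm its expected input shape.
interpreter = tf.lite.Interpreter(model_path=mconf_emodb_m.model_tflite_path)
interpreter.allocate_tensors()
print("TFLite input shape: {}".format(interpreter.get_input_details()[0]['shape']))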
print("--------------------Finished model saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
|
backbone/mains-dataset_wise_structural/main_emodb_male.py
|
import backbone.support.configurations_variables as confv
import backbone.support.data_loading as dl
import backbone.support.data_analysis as da
import backbone.support.data_cleaning as dc
import backbone.support.configuration_classes as confc
import backbone.support.saving_loading as sl
import backbone.support.plots_and_charts as pc
import backbone.support.build_features as bf
import numpy as np
import backbone.support.models as mdl
from sklearn.utils.class_weight import compute_class_weight
from tensorflow.keras.callbacks import TensorBoard
import time
import backbone.support.directory_file_checking as dfc
import os
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
import tensorflow as tf
print("\t===========================================================================================\n"
"\t\tMain program started for MAIN-DATABASE:{database}, GENDER-ISOLATION:{gender}\n"
"\t\t\t\u2234 Dataset Name: {name}\n"
"\t==========================================================================================="
.format(database=confv.database_emodb, gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# DATA LOADING SECTION
print("\n--------------------Started loading original data from the main database: {name}--------------------".format(name=confv.database_emodb))
data_info_emodb_df = dl.load_original_data(database=confv.database_emodb)
print("No. of sample audio files in {database} database: {length}\n".format(database=confv.database_emodb, length=len(data_info_emodb_df)))
print("Dataframe head of {database} database:".format(database=confv.database_emodb))
print(data_info_emodb_df.head())
print("\nDataframe tail of {database} database:".format(database=confv.database_emodb))
print(data_info_emodb_df.tail())
print("--------------------Finished loading original data from the main database: {name}--------------------".format(name=confv.database_emodb))
# RANDOM BASE AUDIO WAVE ANALYSIS SECTION
print("\n\n--------------------Started random base audio wave analysis for the main database: {name}--------------------".format(name=confv.database_emodb))
da.base_audio_wave_analysis(data_info_emodb_df.audio_fname[500], database=confv.database_emodb, status=confv.original)
print("--------------------Finished random base audio wave analysis for the main database: {name}--------------------".format(name=confv.database_emodb))
# DATAFRAME ADJUSTMENTS SECTION
print("\n\n--------------------Started dataframe adjustment for the main database: {name}--------------------".format(name=confv.database_emodb))
data_info_emodb_df_m, data_info_emodb_df_f = dc.data_adjustments(data_info_emodb_df)
print("--------------------Finished dataframe adjustment for the main database: {name}--------------------".format(name=confv.database_emodb))
# DATAFRAME SAVING
print("\n\n--------------------Started dataframe saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
emodb_m_df_obj = confc.DataFrame(database=confv.database_emodb, gender=confv.gender_male, df=data_info_emodb_df_m)
sl.save_dataframe(emodb_m_df_obj)
print("--------------------Finished dataframe saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# LOAD REQUIRED PICKLE
print("\n\n--------------------Started dataframe loading for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
emodb_m_df_obj = confc.DataFrame(database=confv.database_emodb, gender=confv.gender_male)
emodb_m_df_obj = sl.load_dataframe(emodb_m_df_obj)
data_info_emodb_df_m = emodb_m_df_obj.df
print(emodb_m_df_obj.database)
print(emodb_m_df_obj.gender)
print(len(data_info_emodb_df_m))
print(data_info_emodb_df_m.head())
print(data_info_emodb_df_m.tail())
print(emodb_m_df_obj.dataset)
print(emodb_m_df_obj.save_path)
print("--------------------Finished dataframe loading for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# ORIGINAL DATA DISTRIBUTION ANALYSIS SECTION
print("\n\n--------------------Started original data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
pc.emotion_distribution_bar_plot(df=data_info_emodb_df_m, title="{database} - {gender} Isolation - No. of Files".format(database=confv.database_emodb, gender=confv.gender_male))
pc.emotion_distribution_pie_plot(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, title="{database} - {gender} Isolation - Class/Data/Time Distribution".format(database=confv.database_emodb, gender=confv.gender_male))
print("--------------------Finished original data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# ORIGINAL DATA VISUAL ANALYSIS (signal, fft, fbank, mfcc) SECTION
print("\n\n--------------------Started original data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=False, resample=False)
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=True, resample=True)
print("--------------------Finished original data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# DATA CLEANING - DOWN SAMPLING AND NOISE FLOOR DETECTION
print("\n\n--------------------Started data cleaning for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
dc.data_cleaning(df=data_info_emodb_df_m, database=confv.database_emodb)
print("--------------------Finished data cleaning for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# DATA MINIMUM AUDIO LENGTH COMPLIANCE CHECK
print("\n\n--------------------Started data minimum audio compliance check for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
data_info_emodb_df_m = dc.check_and_adjust_df_for_minimum_audio_length_after_cleaning(df=data_info_emodb_df_m, database=confv.database_emodb, gender=confv.gender_male)
print("--------------------Finished data minimum audio compliance check for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# CLEANED DATA DISTRIBUTION ANALYSIS SECTION
print("\n\n--------------------Started cleaned data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
pc.emotion_distribution_bar_plot(df=data_info_emodb_df_m, title="{database} - {gender} Isolation - No. of Files".format(database=confv.database_emodb, gender=confv.gender_male))
pc.emotion_distribution_pie_plot(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.clean, gender=confv.gender_male, title="{database} - {gender} Isolation - Class/Data/Time Distribution".format(database=confv.database_emodb, gender=confv.gender_male))
print("--------------------Finished cleaned data distribution analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# CLEANED DATA VISUAL ANALYSIS (signal, fft, fbank, mfcc) SECTION
print("\n\n--------------------Started cleaned data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.clean, gender=confv.gender_male, envelope=False, resample=False)
# This is the same as,
# da.visual_analysis(df=data_info_emodb_df_m, database=confv.database_emodb, status=confv.original, gender=confv.gender_male, envelope=True, resample=True)
# Since these cleaned data already have the envelope and resampling applied, setting them to False or True does not matter.
# (envelope and resample do not matter when the data is already clean)
print("--------------------Finished cleaned data visual analysis for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
'''
# Building Features
print("\n\n--------------------Started building features for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
classes = list(np.unique(data_info_emodb_df_m.stress_emotion))
mconf_emodb_m = confc.ModelConfig(database=confv.database_emodb, gender=confv.gender_male, mode=confv.ml_mode_convolutional, classes=classes)
print(mconf_emodb_m.database)
print(mconf_emodb_m.gender)
print(mconf_emodb_m.mode)
print(mconf_emodb_m.nfilt)
print(mconf_emodb_m.nfeat)
print(mconf_emodb_m.nfft)
print(mconf_emodb_m.step)
print(mconf_emodb_m.classes)
print(mconf_emodb_m.features_save_name)
print(mconf_emodb_m.model_config_save_name)
print(mconf_emodb_m.training_log_name)
print(mconf_emodb_m.model_save_name)
print(mconf_emodb_m.model_h5_save_name)
print(mconf_emodb_m.model_tflite_save_name)
print(mconf_emodb_m.feature_path)
print(mconf_emodb_m.model_config_path)
print(mconf_emodb_m.training_log_path)
print(mconf_emodb_m.model_path)
print(mconf_emodb_m.model_h5_path)
print(mconf_emodb_m.model_tflite_path)
rfpconf_emodb_m = confc.RandFeatParams(df=data_info_emodb_df_m, database=confv.database_emodb, gender=confv.gender_male)
X, y = bf.build_random_features(modelconfig=mconf_emodb_m, randfeatparams=rfpconf_emodb_m)
print("--------------------Finished building features for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# MODEL & TRAINING
print("\n\n--------------------Started model training for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
input_shape = (X.shape[1], X.shape[2], 1)
model = mdl.get_emodb_male_model(input_shape)
y_flat = np.argmax(y, axis=1)
class_weight = compute_class_weight(class_weight='balanced', classes=np.unique(y_flat), y=y_flat)
class_weight = {i: class_weight[i] for i in range(len(class_weight))}
NAME = "{database}-{gender}-{modeltype}-{spec}-{time}".format(database=confv.database_emodb, gender=confv.gender_male, modeltype=confv.ml_mode_convolutional, spec="1st", time=int(time.time()))
mdl_logs_pth = os.path.join(confv.base_store, confv.log_dir)
tensorboard = TensorBoard(log_dir=os.path.join(mdl_logs_pth, NAME))
dfc.check_dir_inside_saved_features_and_modelconfigs_and_models(parent=confv.saved_training_metrics_logs, database=confv.database_emodb, gender=confv.gender_male)
csv_logger = CSVLogger(mconf_emodb_m.training_log_path)
# earlyStopping = EarlyStopping(monitor='val_loss', patience=10, verbose=0, mode='min')
# mcp_save = ModelCheckpoint('.mdl_wts.hdf5', save_best_only=True, monitor='val_loss', mode='min')
# reduce_lr_loss = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=7, verbose=1, mode='min')
model.fit(X, y, epochs=35, batch_size=32, shuffle=True, class_weight=class_weight, validation_split=0.2, callbacks=[tensorboard, csv_logger])
print("--------------------Finished model training for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
# MODEL SAVING
print("\n\n--------------------Started model saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
dfc.check_dir_inside_saved_features_and_modelconfigs_and_models(parent=confv.saved_models, database=confv.database_emodb, gender=confv.gender_male)
model.save(mconf_emodb_m.model_path)
model.save(mconf_emodb_m.model_h5_path)
# Convert the model & save in tflite
converter = tf.lite.TFLiteConverter.from_saved_model(mconf_emodb_m.model_path)
tflite_model = converter.convert()
with open(mconf_emodb_m.model_tflite_path, 'wb') as outfile:
outfile.write(tflite_model)
print("--------------------Finished model saving for adjusted and {gender} isolated dataset: {name}--------------------".format(gender=confv.gender_male, name=confv.dataset_emodb_male))
| 0.464416 | 0.202187 |
import pytest
from mock import MagicMock
from powerfulseal.node import Node, NodeInventory
@pytest.fixture
def nodes():
return [
Node(id="id1", ip="192.168.127.12", az="AZ1", no=1, name="node1"),
Node(id="id2", ip="172.16.31.10", az="AZ2", no=2, name="node2"),
Node(id="id3", ip="172.16.31.10", az="AZ2", no=3, name="node3"),
]
@pytest.fixture
def mock_driver(nodes):
mock = MagicMock()
mock.nodes = nodes
def get_by_ip(ip):
for node in mock.nodes:
if node.ip == ip:
return node
mock.get_by_ip = get_by_ip
return mock
def test_sync(nodes, mock_driver):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert inventory.groups == {
"TEST1": nodes[0:1],
"TEST2": nodes[0:2]
}
assert inventory.get_azs() == ["AZ1", "AZ2"]
@pytest.mark.parametrize("query, expected_indices", [
(None, [0, 1]),
("all", [0, 1]),
("id1,id2", [0, 1]),
("id1", [0]),
("id2", [1]),
("192.168.127.12", [0]),
("AZ2", [1]),
("2", [1]),
("node2", [1]),
("TEST2", [0, 1]),
("up", []),
("unknown", [0, 1]),
("something-weird", []),
])
def test_find(nodes, mock_driver, query, expected_indices):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert list(inventory.find_nodes(query)) == [nodes[x] for x in expected_indices]
@pytest.mark.parametrize("ip, should_find, index", [
("172.16.31.10", True, 1),
("doesn't exist", False, None),
])
def test_get_by_ip(nodes, mock_driver, ip, should_find, index):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
if should_find:
assert inventory.get_node_by_ip(ip) is nodes[index]
else:
        assert inventory.get_node_by_ip(ip) is None
def test_groups(mock_driver):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert inventory.get_groups() == ["TEST1", "TEST2"]
|
tests/node/test_node_inventory.py
|
import pytest
from mock import MagicMock
from powerfulseal.node import Node, NodeInventory
@pytest.fixture
def nodes():
return [
Node(id="id1", ip="192.168.127.12", az="AZ1", no=1, name="node1"),
Node(id="id2", ip="172.16.31.10", az="AZ2", no=2, name="node2"),
Node(id="id3", ip="172.16.31.10", az="AZ2", no=3, name="node3"),
]
@pytest.fixture
def mock_driver(nodes):
mock = MagicMock()
mock.nodes = nodes
def get_by_ip(ip):
for node in mock.nodes:
if node.ip == ip:
return node
mock.get_by_ip = get_by_ip
return mock
def test_sync(nodes, mock_driver):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert inventory.groups == {
"TEST1": nodes[0:1],
"TEST2": nodes[0:2]
}
assert inventory.get_azs() == ["AZ1", "AZ2"]
@pytest.mark.parametrize("query, expected_indices", [
(None, [0, 1]),
("all", [0, 1]),
("id1,id2", [0, 1]),
("id1", [0]),
("id2", [1]),
("192.168.127.12", [0]),
("AZ2", [1]),
("2", [1]),
("node2", [1]),
("TEST2", [0, 1]),
("up", []),
("unknown", [0, 1]),
("something-weird", []),
])
def test_find(nodes, mock_driver, query, expected_indices):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert list(inventory.find_nodes(query)) == [nodes[x] for x in expected_indices]
@pytest.mark.parametrize("ip, should_find, index", [
("172.16.31.10", True, 1),
("doesn't exist", False, None),
])
def test_get_by_ip(nodes, mock_driver, ip, should_find, index):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
if should_find:
assert inventory.get_node_by_ip(ip) is nodes[index]
else:
        assert inventory.get_node_by_ip(ip) is None
def test_groups(mock_driver):
inventory = NodeInventory(
driver=mock_driver,
restrict_to_groups={
"TEST1": ["192.168.127.12"],
"TEST2": ["192.168.127.12", "172.16.31.10"],
}
)
inventory.sync()
assert inventory.get_groups() == ["TEST1", "TEST2"]
| 0.495117 | 0.634713 |
from __future__ import print_function
import json
import os
import time
from chromite.cbuildbot.stages import generic_stages
from chromite.lib import buildbucket_lib
from chromite.lib import build_requests
from chromite.lib import constants
from chromite.lib import config_lib
from chromite.lib import cros_logging as logging
from chromite.lib import failures_lib
from chromite.lib.const import waterfall
def BuilderName(build_config, active_waterfall, current_builder):
"""Gets the corresponding builder name of the build.
Args:
build_config: build config (string) of the build.
active_waterfall: active waterfall to run the build.
current_builder: buildbot builder name of the current builder, or None.
Returns:
Builder name to run the build on.
"""
# The builder name is configured differently for release builds in
# chromeos and chromeos_release waterfalls. (see crbug.com/755276)
if active_waterfall == waterfall.WATERFALL_RELEASE:
assert current_builder
# Example: master-release release-R64-10176.B
named_branch = current_builder.split()[1]
return '%s %s' % (build_config, named_branch)
else:
return build_config
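# Illustrative example (hypothetical build config): on the release waterfall the named
# branch is taken from the second token of the current builder, so
#   BuilderName('lakitu-release', waterfall.WATERFALL_RELEASE,
#               'master-release release-R64-10176.B')
# returns 'lakitu-release release-R64-10176.B'; on any other waterfall the build config
# is returned unchanged.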
class ScheduleSlavesStage(generic_stages.BuilderStage):
"""Stage that schedules slaves for the master build."""
def __init__(self, builder_run, sync_stage, **kwargs):
super(ScheduleSlavesStage, self).__init__(builder_run, **kwargs)
self.sync_stage = sync_stage
self.buildbucket_client = self.GetBuildbucketClient()
def _GetBuildbucketBucket(self, build_name, build_config):
"""Get the corresponding Buildbucket bucket.
Args:
build_name: name of the build to put to Buildbucket.
build_config: config of the build to put to Buildbucket.
Raises:
NoBuildbucketBucketFoundException when no Buildbucket bucket found.
"""
bucket = buildbucket_lib.WATERFALL_BUCKET_MAP.get(
build_config.active_waterfall)
if bucket is None:
raise buildbucket_lib.NoBuildbucketBucketFoundException(
'No Buildbucket bucket found for builder %s waterfall: %s' %
(build_name, build_config.active_waterfall))
return bucket
def PostSlaveBuildToBuildbucket(self, build_name, build_config,
master_build_id, master_buildbucket_id,
buildset_tag, dryrun=False):
"""Send a Put slave build request to Buildbucket.
Args:
      build_name: Slave build name to put to Buildbucket.
build_config: Slave build config to put to Buildbucket.
master_build_id: CIDB id of the master scheduling the slave build.
master_buildbucket_id: buildbucket id of the master scheduling the
slave build.
buildset_tag: The buildset tag for strong consistent tag queries.
More context: crbug.com/661689
dryrun: Whether a dryrun, default to False.
"""
current_buildername = os.environ.get('BUILDBOT_BUILDERNAME', None)
builder_name = BuilderName(
build_name, build_config.active_waterfall, current_buildername)
# TODO: Find a way to unify these tags with
# remote_try._GetRequestBody
tags = ['buildset:%s' % buildset_tag,
'build_type:%s' % build_config.build_type,
'master:False',
'master_config:%s' % self._run.config.name,
'cbb_display_label:%s' % build_config.display_label,
'cbb_branch:%s' % self._run.manifest_branch,
'cbb_config:%s' % build_name,
'cbb_master_build_id:%s' % master_build_id,
'cbb_master_buildbucket_id:%s' % master_buildbucket_id,
'cbb_email:']
if build_config.boards:
for board in build_config.boards:
tags.append('board:%s' % board)
body = json.dumps({
'bucket': self._GetBuildbucketBucket(build_name, build_config),
'parameters_json': json.dumps({
'builder_name': builder_name,
'properties': {
'cbb_config': build_name,
'cbb_branch': self._run.manifest_branch,
'cbb_master_build_id': master_build_id,
}
}),
'tags': tags
})
content = self.buildbucket_client.PutBuildRequest(body, dryrun)
buildbucket_id = buildbucket_lib.GetBuildId(content)
created_ts = buildbucket_lib.GetBuildCreated_ts(content)
logging.info('Build_name %s buildbucket_id %s created_timestamp %s',
build_name, buildbucket_id, created_ts)
return (buildbucket_id, created_ts)
def ScheduleSlaveBuildsViaBuildbucket(self, important_only=False,
dryrun=False):
"""Schedule slave builds by sending PUT requests to Buildbucket.
Args:
important_only: Whether only schedule important slave builds, default to
False.
dryrun: Whether a dryrun, default to False.
"""
if self.buildbucket_client is None:
logging.info('No buildbucket_client. Skip scheduling slaves.')
return
build_id, db = self._run.GetCIDBHandle()
if build_id is None:
logging.info('No build id. Skip scheduling slaves.')
return
# May be None. This is okay.
master_buildbucket_id = self._run.options.buildbucket_id
buildset_tag = 'cbuildbot/%s/%s/%s' % (
self._run.manifest_branch, self._run.config.name, build_id)
scheduled_important_slave_builds = []
scheduled_experimental_slave_builds = []
unscheduled_slave_builds = []
scheduled_build_reqs = []
# Get all active slave build configs.
slave_config_map = self._GetSlaveConfigMap(important_only)
for slave_config_name, slave_config in slave_config_map.iteritems():
try:
buildbucket_id, created_ts = self.PostSlaveBuildToBuildbucket(
slave_config_name, slave_config, build_id, master_buildbucket_id,
buildset_tag, dryrun=dryrun)
request_reason = None
if slave_config.important:
scheduled_important_slave_builds.append(
(slave_config_name, buildbucket_id, created_ts))
request_reason = build_requests.REASON_IMPORTANT_CQ_SLAVE
else:
scheduled_experimental_slave_builds.append(
(slave_config_name, buildbucket_id, created_ts))
request_reason = build_requests.REASON_EXPERIMENTAL_CQ_SLAVE
scheduled_build_reqs.append(build_requests.BuildRequest(
None, build_id, slave_config_name, None, buildbucket_id,
request_reason, None))
except buildbucket_lib.BuildbucketResponseException as e:
# Use 16-digit ts to be consistent with the created_ts from Buildbucket
current_ts = int(round(time.time() * 1000000))
unscheduled_slave_builds.append((slave_config_name, None, current_ts))
if important_only or slave_config.important:
raise
else:
logging.warning('Failed to schedule %s current timestamp %s: %s'
% (slave_config_name, current_ts, e))
if config_lib.IsMasterCQ(self._run.config) and db and scheduled_build_reqs:
db.InsertBuildRequests(scheduled_build_reqs)
self._run.attrs.metadata.ExtendKeyListWithList(
constants.METADATA_SCHEDULED_IMPORTANT_SLAVES,
scheduled_important_slave_builds)
self._run.attrs.metadata.ExtendKeyListWithList(
constants.METADATA_SCHEDULED_EXPERIMENTAL_SLAVES,
scheduled_experimental_slave_builds)
self._run.attrs.metadata.ExtendKeyListWithList(
constants.METADATA_UNSCHEDULED_SLAVES, unscheduled_slave_builds)
@failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
def PerformStage(self):
if (config_lib.IsMasterCQ(self._run.config) and
not self.sync_stage.pool.HasPickedUpCLs()):
      logging.info('No new CLs or chumped CLs found to verify in this CQ run, '
                   'do not schedule CQ slaves.')
return
self.ScheduleSlaveBuildsViaBuildbucket(important_only=False,
dryrun=self._run.options.debug)
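# ---------------------------------------------------------------------------
# Illustrative only: the approximate shape of one Buildbucket PUT body that
# PostSlaveBuildToBuildbucket assembles above. Every value below (bucket,
# config names, ids, tags) is a made-up placeholder, not a real build.
def _example_put_body():
  return json.dumps({
      'bucket': 'master.chromeos',  # assumed bucket name
      'parameters_json': json.dumps({
          'builder_name': 'amd64-generic-paladin',
          'properties': {
              'cbb_config': 'amd64-generic-paladin',
              'cbb_branch': 'master',
              'cbb_master_build_id': 12345,
          },
      }),
      'tags': [
          'buildset:cbuildbot/master/master-paladin/12345',
          'build_type:paladin',
          'master:False',
          'cbb_config:amd64-generic-paladin',
          'cbb_master_build_id:12345',
      ],
  })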
|
third_party/chromite/cbuildbot/stages/scheduler_stages.py
|
| 0.677687 | 0.110327 |
import torch
class Generator(torch.nn.Module):
"""
Simple Generator Network
"""
def __init__(
self,
latent_dim,
n_classes,
code_dim,
img_size,
num_channels):
"""
Parameters
----------
latent_dim : int
size of the latent dimension
n_classes : int
number of classes
code_dim : int
size of the code dimension
img_size : int
number of pixels per image side
num_channels : int
number of channels to generate
"""
super().__init__()
input_dim = latent_dim + n_classes + code_dim
self.init_size = img_size // 4 # Initial size before upsampling
self.l1 = torch.nn.Linear(input_dim, 128 * self.init_size ** 2)
self.conv_blocks = torch.nn.Sequential(
torch.nn.BatchNorm2d(128),
torch.nn.Upsample(scale_factor=2),
torch.nn.Conv2d(128, 128, 3, stride=1, padding=1),
torch.nn.BatchNorm2d(128, 0.8),
torch.nn.LeakyReLU(0.2, inplace=True),
torch.nn.Upsample(scale_factor=2),
torch.nn.Conv2d(128, 64, 3, stride=1, padding=1),
torch.nn.BatchNorm2d(64, 0.8),
torch.nn.LeakyReLU(0.2, inplace=True),
torch.nn.Conv2d(64, num_channels, 3, stride=1, padding=1),
torch.nn.Tanh(),
)
def forward(self, noise, labels, code):
"""
Forwards a single batch through the network
Parameters
----------
noise : :class:`torch.Tensor`
the noise vector
labels : :class:`torch.Tensor`
the label batch
code : :class:`torch.Tensor`
the code
Returns
-------
:class:`torch.Tensor`
the image batch
"""
gen_input = torch.cat((noise, labels.to(noise.dtype), code), -1)
out = self.l1(gen_input)
out = out.view(out.shape[0], 128, self.init_size, self.init_size)
img = self.conv_blocks(out)
return img
class Discriminator(torch.nn.Module):
"""
A simple discriminator network
"""
def __init__(self, code_dim, n_classes, num_channels, img_size):
"""
Parameters
----------
code_dim : int
size of the code dimension
n_classes : int
number of image classes
num_channels : int
number of image channels
img_size : int
number of pixels per side
"""
super().__init__()
def discriminator_block(in_filters, out_filters, bn=True):
"""Returns layers of each discriminator block"""
block = [torch.nn.Conv2d(in_filters, out_filters, 3, 2, 1),
torch.nn.LeakyReLU(0.2, inplace=True),
torch.nn.Dropout2d(0.25)]
if bn:
block.append(torch.nn.BatchNorm2d(out_filters, 0.8))
return block
self.conv_blocks = torch.nn.Sequential(
*discriminator_block(num_channels, 16, bn=False),
*discriminator_block(16, 32),
*discriminator_block(32, 64),
*discriminator_block(64, 128),
)
# The height and width of downsampled image
ds_size = self.conv_blocks(torch.rand(1, num_channels, img_size,
img_size)).size(2)
# Output layers
self.adv_layer = torch.nn.Linear(128 * ds_size ** 2, 1)
        self.aux_layer = torch.nn.Sequential(
            torch.nn.Linear(128 * ds_size ** 2, n_classes), torch.nn.Softmax(dim=1))
self.latent_layer = torch.nn.Linear(128 * ds_size ** 2, code_dim)
def forward(self, img):
"""
Feeds a single image batch through the network
Parameters
----------
img : :class:`torch.Tensor`
the image batch
Returns
-------
:class:`torch.Tensor`
the validity for each image
:class:`torch.Tensor`
the predicted label for each image
:class:`torch.Tensor`
the predicted latent code for each image
"""
out = self.conv_blocks(img)
out = out.view(out.shape[0], -1)
validity = self.adv_layer(out)
label = self.aux_layer(out)
latent_code = self.latent_layer(out)
return validity, label, latent_code
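# ---------------------------------------------------------------------------
# A minimal smoke-test sketch for the two networks above. Illustrative only:
# the hyper-parameters (62/10/2 and 32x32 single-channel images) are
# assumptions chosen so the shapes line up, not values taken from this repo.
if __name__ == "__main__":
    latent_dim, n_classes, code_dim, img_size, channels = 62, 10, 2, 32, 1
    gen = Generator(latent_dim, n_classes, code_dim, img_size, channels)
    disc = Discriminator(code_dim, n_classes, channels, img_size)
    batch = 4
    noise = torch.randn(batch, latent_dim)
    # one-hot labels; Generator.forward casts them to the noise dtype
    labels = torch.nn.functional.one_hot(
        torch.randint(0, n_classes, (batch,)), n_classes)
    code = torch.rand(batch, code_dim)
    imgs = gen(noise, labels, code)            # -> (4, 1, 32, 32)
    validity, label, latent_code = disc(imgs)  # -> (4, 1), (4, 10), (4, 2)
    print(imgs.shape, validity.shape, label.shape, latent_code.shape)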
|
dlutils/models/gans/info/models.py
|
| 0.947247 | 0.695441 |
import json
import uuid
from datetime import timedelta
from unittest.mock import patch
from alpaca_trade_api.entity import Account as AlpacaAccount
from alpaca_trade_api.entity import Order as AlpacaOrder
from alpaca_trade_api.entity import Position as AlpacaPosition
from assets.models import Asset, Bar
from assets.tests.factories import AssetFactory
from core.tasks import (
fetch_bar_data_for_strategy,
moving_average_strategy,
run_strategies_for_users,
)
from core.tests.factories import StrategyFactory
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.utils import timezone
from orders.models import Order
from users.tests.factories import UserFactory
class CoreTaskTests(TestCase):
def setUp(self):
self.tsla = AssetFactory(symbol="TSLA")
self.user_1 = UserFactory()
self.user_2 = UserFactory()
self.user_3 = UserFactory()
self.strategy_1 = StrategyFactory(
asset=self.tsla, user=self.user_1, trade_value=1000
)
self.strategy_2 = StrategyFactory(
asset=self.tsla, user=self.user_2, trade_value=1000
)
self.inactive_strategy = StrategyFactory(
user=self.user_3,
start_date=timezone.now() - timedelta(days=7),
end_date=timezone.now() - timedelta(days=5),
)
def refresh_tsla_bars(self, max_epoch=1648443600):
tsla = Asset.objects.get(symbol="TSLA")
Bar.objects.filter(asset=tsla).delete()
with open("assets/tests/sample_tsla_bars.json") as f:
bars = json.load(f)
objs = [
Bar(
asset=tsla,
t=bar["t"],
o=bar["o"],
h=bar["h"],
l=bar["l"],
c=bar["c"],
v=bar["v"],
)
for bar in bars
if int(bar["t"]) <= max_epoch
]
Bar.objects.bulk_create(objs, batch_size=1000, ignore_conflicts=True)
@patch("core.tasks.TradeApiRest")
@patch("core.tasks.moving_average_strategy")
def test_run_strategies_for_user(
self, mock_moving_average_strategy, mock_trade_api
):
"""Moving average strategies that are active are run for users."""
mock_trade_api.return_value.is_market_open.return_value = True
run_strategies_for_users()
self.assertEqual(mock_moving_average_strategy.call_count, 2)
mock_moving_average_strategy.assert_any_call(self.user_1)
mock_moving_average_strategy.assert_any_call(self.user_2)
@patch("core.tasks.logger")
@patch("core.tasks.fetch_bar_data_for_strategy")
@patch("core.tasks.update_bars")
@patch("core.tasks.TradeApiRest")
def test_moving_average_strategy_not_enough_data(
self,
mock_trade_api,
mock_update_bars,
mock_fetch_bar_data_for_strategy,
mock_logger,
):
"""Active strategy does not create an order if there is not enough bar data."""
mock_fetch_bar_data_for_strategy.return_value = None
moving_average_strategy(self.user_1)
mock_logger.info.assert_called_once_with(
f"Insufficient bar data for asset: {self.strategy_1.asset.id}"
)
@patch("core.tasks.time.mktime")
@patch("core.tasks.update_bars")
def test_fetch_bar_data_for_strategy(self, mock_update_bars, mock_mktime):
"""Bar data is fetched when required."""
self.refresh_tsla_bars()
with self.subTest(msg="bar data is not required."):
# Enough bar data exists in sample data at this time
mock_mktime.return_value = "1614229200"
fetch_bar_data_for_strategy(self.strategy_1)
mock_update_bars.assert_not_called()
mock_mktime.reset_mock()
mock_update_bars.reset_mock()
with self.subTest(msg="bar data is required."):
# Not enough bar data exists in sample data at this time
mock_mktime.return_value = "1648443600"
fetch_bar_data_for_strategy(self.strategy_1)
mock_update_bars.assert_called_once_with(["TSLA"], "15Min", 131)
@patch("core.tasks.TradeApiRest")
@patch("core.tasks.time.mktime")
@patch("core.tasks.update_bars")
@patch("core.tasks.fetch_bar_data_for_strategy")
def test_moving_average_strategy(
self,
mock_fetch_bar_data_for_strategy,
mock_update_bars,
mock_mktime,
mock_trade_api,
):
"""Active strategy creates an order if required."""
account_info = AlpacaAccount(
{
"account_blocked": False,
"account_number": "GS78FJEUMA4P",
"buying_power": "200000",
"cash": "100000",
"created_at": "2020-10-31T23:40:50.376107Z",
"currency": "USD",
"daytrade_count": 0,
"daytrading_buying_power": "0",
"equity": "100000",
"id": str(uuid.uuid4()),
"initial_margin": "0",
"last_equity": "100000",
"last_maintenance_margin": "0",
"long_market_value": "0",
"maintenance_margin": "0",
"multiplier": "2",
"non_marginable_buying_power": "100000",
"pattern_day_trader": True,
"pending_transfer_in": "0",
"portfolio_value": "100000",
"regt_buying_power": "200000",
"short_market_value": "0",
"shorting_enabled": True,
"sma": "0",
"status": "ACTIVE",
"trade_suspended_by_user": False,
"trading_blocked": False,
"transfers_blocked": False,
}
)
position = AlpacaPosition(
{
"asset_id": "0",
"symbol": "TSLA",
"exchange": "0",
"asset_class": "0",
"avg_entry_price": "0",
"qty": "10",
"side": "long",
"market_value": "2000.0",
"cost_basis": "0",
"unrealized_pl": "0",
"unrealized_plpc": "0",
"unrealized_intraday_pl": "0",
"unrealized_intraday_plpc": "0",
"current_price": "0",
"lastday_price": "0",
"change_today": "0",
}
)
order = AlpacaOrder(
{
"id": str(uuid.uuid4()),
"client_order_id": str(uuid.uuid4()),
"created_at": "2021-03-16T18:38:01.942282Z",
"updated_at": "2021-03-16T18:38:01.942282Z",
"submitted_at": "2021-03-16T18:38:01.937734Z",
"filled_at": None,
"expired_at": None,
"canceled_at": None,
"failed_at": None,
"replaced_at": None,
"replaced_by": None,
"replaces": None,
"asset_id": self.tsla.pk,
"symbol": "TSLA",
"asset_class": "us_equity",
"notional": "500",
"qty": None,
"filled_qty": "0",
"filled_avg_price": None,
"order_class": "",
"order_type": "market",
"type": "market",
"side": "buy",
"time_in_force": "day",
"limit_price": None,
"stop_price": None,
"status": "accepted",
"extended_hours": False,
"trail_percent": None,
"trail_price": None,
"hwm": None,
}
)
mock_fetch_bar_data_for_strategy.return_value = 130
mock_trade_api.return_value.is_market_open.return_value = True
mock_trade_api.return_value.account_info.return_value = account_info
mock_trade_api.return_value.list_position_by_symbol.return_value = position
mock_trade_api.return_value.submit_order.return_value = order
seven_days_epoch = 104 * 86400
with self.subTest(msg="buy order is placed."):
max_epoch_time = 1630818000
base_epoch = max_epoch_time - seven_days_epoch
mock_mktime.return_value = base_epoch
self.refresh_tsla_bars(max_epoch=max_epoch_time)
moving_average_strategy(self.user_1)
mock_trade_api.return_value.submit_order.assert_called_once_with(
symbol=self.strategy_1.asset.symbol,
notional=float(self.strategy_1.trade_value),
side=Order.BUY,
type=Order.MARKET,
time_in_force=Order.GTC,
)
mock_mktime.reset_mock()
mock_trade_api.reset_mock()
Order.objects.all().delete()
with self.subTest(msg="sell order is placed."):
max_epoch_time = 1618894800
base_epoch = max_epoch_time - seven_days_epoch
mock_mktime.return_value = base_epoch
self.refresh_tsla_bars(max_epoch=max_epoch_time)
moving_average_strategy(self.user_1)
mock_trade_api.return_value.submit_order.assert_called_once_with(
symbol=self.strategy_1.asset.symbol,
notional=float(self.strategy_1.trade_value),
side=Order.SELL,
type=Order.MARKET,
time_in_force=Order.GTC,
)
mock_mktime.reset_mock()
mock_trade_api.reset_mock()
Order.objects.all().delete()
with self.subTest(msg="no order is placed."):
max_epoch_time = 1633150800
base_epoch = max_epoch_time - seven_days_epoch
mock_mktime.return_value = base_epoch
self.refresh_tsla_bars(max_epoch=max_epoch_time)
moving_average_strategy(self.user_1)
mock_trade_api.return_value.submit_order.assert_not_called()
@patch("core.tasks.logger")
@patch("core.tasks.TradeApiRest")
@patch("core.tasks.time.mktime")
@patch("core.tasks.update_bars")
@patch("core.tasks.fetch_bar_data_for_strategy")
def test_moving_average_strategy_fails(
self,
mock_fetch_bar_data_for_strategy,
mock_update_bars,
mock_mktime,
mock_trade_api,
mock_logger,
):
"""If trade view api fails to submit an order, an order object is not created."""
account_info = AlpacaAccount(
{
"account_blocked": False,
"account_number": "GS78FJEUMA4P",
"buying_power": "200000",
"cash": "100000",
"created_at": "2020-10-31T23:40:50.376107Z",
"currency": "USD",
"daytrade_count": 0,
"daytrading_buying_power": "0",
"equity": "100000",
"id": str(uuid.uuid4()),
"initial_margin": "0",
"last_equity": "100000",
"last_maintenance_margin": "0",
"long_market_value": "0",
"maintenance_margin": "0",
"multiplier": "2",
"non_marginable_buying_power": "100000",
"pattern_day_trader": True,
"pending_transfer_in": "0",
"portfolio_value": "100000",
"regt_buying_power": "200000",
"short_market_value": "0",
"shorting_enabled": True,
"sma": "0",
"status": "ACTIVE",
"trade_suspended_by_user": False,
"trading_blocked": False,
"transfers_blocked": False,
}
)
position = AlpacaPosition(
{
"asset_id": "0",
"symbol": "TSLA",
"exchange": "0",
"asset_class": "0",
"avg_entry_price": "0",
"qty": "10",
"side": "long",
"market_value": "2000.0",
"cost_basis": "0",
"unrealized_pl": "0",
"unrealized_plpc": "0",
"unrealized_intraday_pl": "0",
"unrealized_intraday_plpc": "0",
"current_price": "0",
"lastday_price": "0",
"change_today": "0",
}
)
order = AlpacaOrder(
{
"id": str(uuid.uuid4()),
"client_order_id": str(uuid.uuid4()),
"created_at": "2021-03-16T18:38:01.942282Z",
"updated_at": "2021-03-16T18:38:01.942282Z",
"submitted_at": "2021-03-16T18:38:01.937734Z",
"filled_at": None,
"expired_at": None,
"canceled_at": None,
"failed_at": None,
"replaced_at": None,
"replaced_by": None,
"replaces": None,
"asset_id": self.tsla.pk,
"symbol": "TSLA",
"asset_class": "us_equity",
"notional": "500",
"qty": None,
"filled_qty": "0",
"filled_avg_price": None,
"order_class": "",
"order_type": "market",
"type": "market",
"side": "buy",
"time_in_force": "day",
"limit_price": None,
"stop_price": None,
"status": "accepted",
"extended_hours": False,
"trail_percent": None,
"trail_price": None,
"hwm": None,
}
)
mock_fetch_bar_data_for_strategy.return_value = 130
mock_trade_api.return_value.is_market_open.return_value = True
mock_trade_api.return_value.account_info.return_value = account_info
mock_trade_api.return_value.list_position_by_symbol.return_value = position
mock_trade_api.return_value.submit_order.side_effect = ValidationError(
"Mock error"
)
seven_days_epoch = 104 * 86400
max_epoch_time = 1630818000
base_epoch = max_epoch_time - seven_days_epoch
mock_mktime.return_value = base_epoch
self.refresh_tsla_bars(max_epoch=max_epoch_time)
moving_average_strategy(self.user_1)
mock_logger.warning.assert_called_once()
self.assertEqual(Order.objects.count(), 0)
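# ---------------------------------------------------------------------------
# Illustrative only: the kind of short/long moving-average crossover rule the
# tests above appear to exercise. This is an assumption about core.tasks, not
# its actual implementation; the window sizes are placeholders.
def crossover_signal(closes, short_window=20, long_window=100):
    """Return 'buy', 'sell', or None from a simple SMA crossover."""
    if len(closes) < long_window + 1:
        return None  # analogous to the "insufficient bar data" branch above
    short_prev = sum(closes[-short_window - 1:-1]) / short_window
    long_prev = sum(closes[-long_window - 1:-1]) / long_window
    short_now = sum(closes[-short_window:]) / short_window
    long_now = sum(closes[-long_window:]) / long_window
    if short_prev <= long_prev and short_now > long_now:
        return 'buy'
    if short_prev >= long_prev and short_now < long_now:
        return 'sell'
    return None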
|
server/core/tests/test_tasks.py
|
| 0.574037 | 0.235614 |
from dash.dependencies import Input, Output
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
from textwrap import dedent
import dash_table
from tutorial import styles, tools
examples = {
example: tools.load_example('tutorial/examples/table/{}'.format(example))
for example in [
'callbacks_paging.py',
'callbacks_paging_page_count.py',
'callbacks_paging_and_sorting.py',
'callbacks_paging_multicolumn_sorting.py',
'callbacks_filtering.py',
'callbacks_sorting_filtering.py',
'callbacks_filtering_graph.py'
]
}
layout = html.Div([
dcc.Markdown('# DataTable - Python Callbacks'),
dcc.Markdown(dedent(
'''
### Backend Paging
With backend paging, we can load data into our table progressively.
Instead of loading all of the data at once, we'll only load data
as the user requests it when they click on the "previous" and "next"
buttons.
Since backend paging integrates directly with your Dash callbacks, you can
load your data from any Python data source.
''')),
dcc.Markdown(
examples['callbacks_paging.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_paging.py'][1],
className='example-container'
),
html.Hr(),
dcc.Markdown(dedent('''
With backend paging, we can have front-end sorting and filtering
but it will only filter and sort the data that exists on the page.
This should be avoided. Your users will expect
that sorting and filtering is happening on the entire dataset and,
    with large pages, might not be aware that this is only occurring
    on the current page.
    Instead, we recommend implementing sorting and filtering on the
backend as well. That is, on the entire underlying dataset.
**Note for returning users - changed property names:**
- Sorted fields are now in `sort_by`, not `sorting_settings`
- The filter string is now in `filter`, not `filtering_settings`
''')),
dcc.Markdown('### Backend Paging and Page Numbers'),
dcc.Markdown(dedent('''
The pagination menu includes the number of the current page and
the total page count. With native (i.e., frontend) pagination, the
page count is calculated by the table. However, when using backend
pagination, the data are served to the table through a callback;
this makes it impossible for the table to calculate the total page
count. As a consequence, the last-page navigation button is
disabled (although all of the other buttons, as well as the direct
navigation, are still functional).
To get around this, supply a value to the `page_count` parameter
of the table. This will serve as the "last page", which will
re-enable the last-page navigation button and be displayed in the
pagination menu. *Please note that you will not be able to use the
pagination menu to navigate to a page that comes after the last
page specified by `page_count`!*
''')),
dcc.Markdown(
examples['callbacks_paging_page_count.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_paging_page_count.py'][1],
className='example-container'
),
dcc.Markdown('### Backend Paging with Sorting'),
dcc.Markdown(
examples['callbacks_paging_and_sorting.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_paging_and_sorting.py'][1],
className='example-container'
),
dcc.Markdown('### Backend Paging with Multi Column Sorting'),
dcc.Markdown(dedent('''
Multi-column sort allows you to sort by multiple columns.
This is useful when you have categorical columns with repeated
values and you're interested in seeing the sorted values for
each category.
In this example, try sorting by continent and then any other column.
''')),
dcc.Markdown(
examples['callbacks_paging_multicolumn_sorting.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_paging_multicolumn_sorting.py'][1],
className='example-container'
),
dcc.Markdown('### Backend Paging with Filtering'),
dcc.Markdown(dedent('''
DataTable's front-end filtering has its own filtering expression
language.
Currently, backend filtering must parse the same filtering language.
If you write an expression that is not "valid" under the filtering
language, then it will not be passed to the backend.
This limitation will be removed in the future to allow you to
write your own expression query language.
In this example, we've written a Pandas backend for the filtering
language. It supports `eq`, `<`, and `>`. For example, try:
- Enter `eq Asia` in the "continent" column
- Enter `> 5000` in the "gdpPercap" column
- Enter `< 80` in the `lifeExp` column
> Note that unlike the front-end filtering, our backend filtering
> expression language doesn't require or support `num()` or wrapping
> items in double quotes (`"`).
> We will improve this syntax in the future,
> follow [dash-table#169](https://github.com/plotly/dash-table/issues/169)
> for more.
''')),
dcc.Markdown(
examples['callbacks_filtering.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_filtering.py'][1],
className='example-container'
),
dcc.Markdown('### Backend Paging with Filtering and Multi-Column Sorting'),
dcc.Markdown(
examples['callbacks_sorting_filtering.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_sorting_filtering.py'][1],
className='example-container'
),
dcc.Markdown('### Connecting Backend Paging with a Graph'),
dcc.Markdown(dedent('''
This final example ties it all together: the graph component
displays the current page of the `data`.
''')),
dcc.Markdown(
examples['callbacks_filtering_graph.py'][0],
style=styles.code_container
),
html.Div(
examples['callbacks_filtering_graph.py'][1],
className='example-container'
),
])
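# ---------------------------------------------------------------------------
# A minimal backend-paging sketch of the pattern this chapter describes.
# Illustrative only: the dataframe is synthetic and the property names follow
# the dash-table 4.x API (page_action/page_current/page_size), which may not
# match the exact example files loaded above.
def _make_backend_paging_demo():
    import dash
    df = pd.DataFrame({'x': range(1000), 'y': range(1000, 2000)})
    app = dash.Dash(__name__)
    app.layout = dash_table.DataTable(
        id='paging-demo',
        columns=[{'name': c, 'id': c} for c in df.columns],
        page_current=0,
        page_size=10,
        page_action='custom',  # hand paging over to the callback below
    )
    @app.callback(
        Output('paging-demo', 'data'),
        [Input('paging-demo', 'page_current'),
         Input('paging-demo', 'page_size')])
    def update_page(page_current, page_size):
        start = page_current * page_size
        return df.iloc[start:start + page_size].to_dict('records')
    return app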
|
tutorial/table/table_callbacks_chapter.py
|
| 0.76769 | 0.478529 |
from django import forms
from .models import OwnerProfileInfo, RenterProfileInfo, User, Boat, RentContract, BoatCrew, RepairContract, Crew
from django.utils import timezone
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput())
class Meta:
model = User
fields = ['username', 'email', 'password', 'first_name', 'last_name']
class OwnerProfileInfoForm(forms.ModelForm):
    class Meta:
        model = OwnerProfileInfo
        fields = ('date_of_birth', 'phone_number', 'avatar')
class RenterProfileInfoForm(forms.ModelForm):
    class Meta:
        model = RenterProfileInfo
        fields = ('date_of_birth', 'phone_number', 'avatar')
class BoatForm(forms.ModelForm):
#owner_id = forms.IntegerField(required=False, widget=forms.HiddenInput())
type = forms.ChoiceField(widget=forms.Select(), choices=([('Go-Fast Boat', 'Go-Fast Boat'),
('Luxury Yacht', 'Luxury Yacht'),
('Cabin cruiser', 'Cabin cruiser'),
('Yacht', 'Yacht'), ]))
date_of_registration = forms.DateField(widget=forms.HiddenInput(), required=False,
initial=timezone.localtime(timezone.now()).date())
class Meta:
model = Boat
fields = ['name', 'type', 'licence_plate', 'price', 'date_of_registration', 'boat_photo', 'bay_id', 'owner_id']
class RentForm(forms.ModelForm):
#renter = forms.IntegerField(required=False, widget=forms.HiddenInput())
#boat = forms.IntegerField(required=False, widget=forms.HiddenInput())
#total_price = forms.IntegerField(required=False, widget=forms.HiddenInput())
class Meta:
model = RentContract
fields = ['date_begin', 'date_end', 'renter', 'boat', 'total_price']
class CrewContractForm(forms.ModelForm):
post = forms.ChoiceField(widget=forms.Select(), choices=([('Sailor', 'Sailor'),
('Lieutenant', 'Lieutenant'),
('Midshipman', 'Midshipman'),
('Navigator', 'Navigator'),
('Captain', 'Captain'), ]))
date_take_post = forms.DateField(widget=forms.HiddenInput(), required=False, initial=timezone.localtime(timezone.now()).date())
class Meta:
model = BoatCrew
fields = ['crew', 'boat', 'post', 'salary', 'date_take_post']
def __init__(self, *args, **kwargs):
super(CrewContractForm, self).__init__(*args, **kwargs)
self.fields["crew"].queryset = Crew.objects.filter(recruited=False)
class RepairContractForm(forms.ModelForm):
date_end = forms.DateField(widget=forms.HiddenInput(), required=False)
class Meta:
model = RepairContract
fields = ['elling', 'boat', 'date_begin', 'date_end', 'repair_price', 'repair_cause']
def __init__(self, u_id, *args, **kwargs):
super(RepairContractForm, self).__init__(*args, **kwargs)
self.fields["boat"].queryset = Boat.objects.filter(owner_id=u_id)
|
boats/forms.py
|
| 0.563138 | 0.084041 |
# Common Imports
import rospy
import roslib
from harmoni_common_lib.constants import State
from harmoni_common_lib.service_server import HarmoniServiceServer
from harmoni_common_lib.service_manager import HarmoniServiceManager
import harmoni_common_lib.helper_functions as hf
# Specific Imports
from harmoni_common_lib.constants import ActuatorNameSpace
from std_msgs.msg import String
import boto3
import json
import ast
class WebService(HarmoniServiceManager):
"""
Web service
"""
def __init__(self, name, param):
""" Initialization of variables and web parameters """
super().__init__(name)
self.name = name
self.user_id = param["user_id"]
self.timer_interval = param["timer_interval"]
self.service_id = hf.get_child_id(self.name)
self.is_request = True
"""Setup publisher and subscriber """
self.web_sub = rospy.Subscriber(
ActuatorNameSpace.web.value + self.service_id + "/listen_click_event",
String,
self._event_click_callback,
queue_size=1,
)
print(ActuatorNameSpace.web.value + self.service_id + "/set_view")
self.web_pub = rospy.Publisher(
ActuatorNameSpace.web.value + self.service_id + "/set_view",
String,
queue_size=1,
)
""" Setup the web request """
self.setup_web()
"""Setup the web service as server """
self.state = State.INIT
return
def setup_web(self):
rospy.loginfo("Setting up the %s" % self.name)
rospy.loginfo("Checking that web is connected to ROS websocket")
rospy.wait_for_service(
ActuatorNameSpace.web.value + self.service_id + "/is_connected"
)
rospy.loginfo("Done, web is connected to ROS websocket")
return
def do(self, data):
""" Do the display view"""
rospy.loginfo("Start the %s do" % self.name)
self.state = State.REQUEST
self.actuation_completed = False
data_array = self._get_web_data(data)
try:
rospy.sleep(1)
for data in data_array:
self.send_request(data)
rospy.sleep(0.2)
self.state = State.SUCCESS
self.actuation_completed = True
except Exception:
self.state = State.FAILED
self.actuation_completed = True
return
def _get_web_data(self, data):
data = ast.literal_eval(data)
web_array = []
if not isinstance(data, list):
if "behavior_data" in data.keys():
behavior_data = ast.literal_eval(data["behavior_data"])
for b in behavior_data:
if "type" in b.keys():
if b["type"] == "web":
container_id = b["args"][0]
set_view = ""
if len(b["args"]) > 1:
set_view = b["args"][1]
web_array.append(
str(
{
"component_id": container_id,
"set_content": set_view,
"start": b["start"],
}
)
)
else:
web_array.append(str(data))
else:
for item in data:
web_array.append(str(item))
return web_array
def send_request(self, display_view):
""" Send the request to the web page"""
rospy.loginfo("Sending request to webpage")
print(display_view)
self.web_pub.publish(display_view)
return
def _event_click_callback(self, event):
"""Callback for subscription to the web page"""
rospy.loginfo("Received an event from the webpage")
return
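# ---------------------------------------------------------------------------
# Illustrative only: what a client node (already initialized with
# rospy.init_node) could publish to drive the web page directly, mirroring the
# dict format used by send_request above. The service id "default" and the
# component/content values are assumptions.
def _publish_demo_view(service_id="default"):
    pub = rospy.Publisher(
        ActuatorNameSpace.web.value + service_id + "/set_view",
        String,
        queue_size=1,
    )
    rospy.sleep(1)  # give the publisher connection time to form
    pub.publish(str({
        "component_id": "demo_container",
        "set_content": "Hello from the web service demo",
        "start": 0,
    }))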
def main():
service_name = ActuatorNameSpace.web.name
name = rospy.get_param("/name_" + service_name + "/")
test = rospy.get_param("/test_" + service_name + "/")
test_input = rospy.get_param("/test_input_" + service_name + "/")
test_id = rospy.get_param("/test_id_" + service_name + "/")
try:
rospy.init_node(service_name)
param = rospy.get_param(name + "/" + test_id + "_param/")
if not hf.check_if_id_exist(service_name, test_id):
rospy.logerr("ERROR: Remember to add your configuration ID also in the harmoni_core config file")
return
service = hf.set_service_server(service_name, test_id)
s = WebService(service, param)
service_server = HarmoniServiceServer(name=service, service_manager=s)
if test:
rospy.loginfo("Testing the %s" % (service))
rospy.sleep(2)
s.do(test_input)
else:
service_server.update_feedback()
rospy.spin()
except rospy.ROSInterruptException:
pass
if __name__ == "__main__":
main()
harmoni_actuators/harmoni_web/scripts/harmoni_web/web_service.py
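For reference, a minimal sketch of the payload shape that WebService._get_web_data expects when a behavior dictionary is handed to do(); the component id, HTML content, and timing values below are illustrative assumptions, not values taken from the HARMONI repository.

# Illustrative payload only: the keys mirror what _get_web_data parses above;
# the concrete ids and content are made up for this example.
example_behavior = [
    {"type": "web", "args": ["display_container", "<p>Hello!</p>"], "start": 0.0},
    {"type": "speech", "args": ["entries of other types are skipped"], "start": 0.0},
]
example_payload = str({"behavior_data": str(example_behavior)})
# A running WebService instance would then publish one set_view message per "web" entry:
# web_service.do(example_payload)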
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.keras.models import Model
from tensorflow.python.keras import layers
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.optimizers import Adam, SGD, RMSprop
from tensorflow.python.keras.layers import concatenate, Input, Conv2DTranspose, Activation, Reshape, Dropout, Flatten, Conv2D, MaxPooling2D, Dense, BatchNormalization, GlobalAveragePooling2D
def conv2d_block(input_tensor, n_filters, kernel_size = 3, batchnorm = True):
# first layer
x = Conv2D(filters = n_filters, kernel_size = (kernel_size, kernel_size),\
kernel_initializer = 'he_normal', padding = 'same')(input_tensor)
if batchnorm:
x = BatchNormalization()(x)
x = Activation('relu')(x)
# second layer
    x = Conv2D(filters = n_filters, kernel_size = (kernel_size, kernel_size),\
               kernel_initializer = 'he_normal', padding = 'same')(x)
if batchnorm:
x = BatchNormalization()(x)
x = Activation('relu')(x)
return x
def get_unet(input_data, n_filters = 16, dropout = 0.1, batchnorm = True):
# Contracting Path
c1 = conv2d_block(input_data, n_filters * 1, kernel_size = 3, batchnorm = batchnorm)
p1 = MaxPooling2D((2, 2))(c1)
p1 = Dropout(dropout)(p1)
c2 = conv2d_block(p1, n_filters * 2, kernel_size = 3, batchnorm = batchnorm)
p2 = MaxPooling2D((2, 2))(c2)
p2 = Dropout(dropout)(p2)
c3 = conv2d_block(p2, n_filters * 4, kernel_size = 3, batchnorm = batchnorm)
p3 = MaxPooling2D((2, 2))(c3)
p3 = Dropout(dropout)(p3)
c4 = conv2d_block(p3, n_filters * 8, kernel_size = 3, batchnorm = batchnorm)
p4 = MaxPooling2D((2, 2))(c4)
p4 = Dropout(dropout)(p4)
c5 = conv2d_block(p4, n_filters = n_filters * 16, kernel_size = 3, batchnorm = batchnorm)
# Expansive Path
u6 = Conv2DTranspose(n_filters * 8, (3, 3), strides = (2, 2), padding = 'same')(c5)
u6 = concatenate([u6, c4])
u6 = Dropout(dropout)(u6)
c6 = conv2d_block(u6, n_filters * 8, kernel_size = 3, batchnorm = batchnorm)
u7 = Conv2DTranspose(n_filters * 4, (3, 3), strides = (2, 2), padding = 'same')(c6)
u7 = concatenate([u7, c3])
u7 = Dropout(dropout)(u7)
c7 = conv2d_block(u7, n_filters * 4, kernel_size = 3, batchnorm = batchnorm)
u8 = Conv2DTranspose(n_filters * 2, (3, 3), strides = (2, 2), padding = 'same')(c7)
u8 = concatenate([u8, c2])
u8 = Dropout(dropout)(u8)
c8 = conv2d_block(u8, n_filters * 2, kernel_size = 3, batchnorm = batchnorm)
u9 = Conv2DTranspose(n_filters * 1, (3, 3), strides = (2, 2), padding = 'same')(c8)
u9 = concatenate([u9, c1])
u9 = Dropout(dropout)(u9)
c9 = conv2d_block(u9, n_filters * 1, kernel_size = 3, batchnorm = batchnorm)
outputs = Conv2D(1, (1, 1), activation='sigmoid')(c9)
model = Model(inputs=[input_data], outputs=[outputs])
return model
unet.py
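A minimal sketch of how get_unet might be wired up, assuming single-channel 128x128 inputs and a binary mask target; the input shape, loss, and training call are assumptions, since the file above only defines the architecture.

# Sketch only: input shape and training settings are assumptions.
input_img = Input(shape=(128, 128, 1), name='img')
model = get_unet(input_img, n_filters=16, dropout=0.1, batchnorm=True)
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
# model.fit(X_train, y_train, batch_size=16, epochs=10, validation_data=(X_valid, y_valid))

Note that the four 2x2 pooling stages require the spatial dimensions to be divisible by 16, which 128x128 satisfies.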
import arcanelaunch
from arcanelaunch import setenv,getenv
import sys
import os
import copy
import shutil
import optparse
import re
link_dirs = "@ARCANE_LINK_DIRECTORIES@"
#TODO: handle spaces in paths correctly
link_dirs = link_dirs.replace(" ",os.pathsep)
#print "link_dirs=",link_dirs
path_bin = "@ARCANE_INSTALL_BIN@"
path_lib = "@ARCANE_INSTALL_LIB@"
path_shr = "@ARCANE_INSTALL_SHR@"
stdenv_exe = os.path.join(path_lib,"arcane_axl") + getenv("STDENV_PURE","")
setenv("STDENV_PARALLEL","FALSE")
setenv("STDENV_APPLICATION_NAME","axl2cc")
setenv("STDENV_QUIET","TRUE")
setenv("STDENV_TRACE","off")
nb_arg = len(sys.argv)
pargs = []
do_copy = False
if nb_arg==6 and sys.argv[5]==".xml":
    # Old format.
print "WARNING: this format is deprecated. Use axl"
path = sys.argv[2]
component_name = sys.argv[3]
name = sys.argv[4]
extension = sys.argv[5]
pargs = copy.copy(sys.argv)
full_name = os.path.join(path,name) + extension
else:
parser = optparse.OptionParser(usage="%prog [-i header] [-o output_path] axlfile")
#print "nb_arg",nb_arg
parser.add_option("-i","--header-path",type="string",dest="header_path",help="header sub path")
parser.add_option("-o","--output-path",type="string",dest="output_path",help="path to write output files")
parser.add_option("-c","--copy",action="store_true",dest="do_copy",help="true if installing in share path")
(options, args) = parser.parse_args()
print str(options)
print str(args)
output_path = os.getcwd()
if options.output_path:
output_path = options.output_path
print "OutputPath=",output_path
    component_name = "."
    if options.header_path:
        component_name = options.header_path
    if options.do_copy:
        do_copy = True
if len(args)!=1:
parser.error("axl file not specified")
sys.exit(1)
full_name = args[0]
file_name = os.path.basename(full_name)
file_path = os.path.dirname(full_name)
if len(file_path)==0:
file_path = "."
file_name_no_extension_re = re.compile("(.*)\.axl").match(file_name)
if file_name_no_extension_re == None:
parser.error("axlfile has to have extension '.axl'")
sys.exit(1)
file_name_no_extension = file_name_no_extension_re.group(1)
print "Infos: file_path=",file_path," name=",file_name_no_extension
name = file_name_no_extension
extension = ".axl"
pargs.append(sys.argv[0])
pargs.append(output_path)
pargs.append(file_path)
pargs.append(component_name)
pargs.append(file_name_no_extension)
pargs.append(".axl")
output_name = os.path.join(path_shr,name)
if component_name != ".":
output_name += "_" + component_name
output_name += extension
al = arcanelaunch.ArcaneLaunchExec()
al.setApplicationExecutable(stdenv_exe)
al.setParallelService(None)
al.addToLdPath(link_dirs)
r = al.process(pargs)
if r == 0 and do_copy:
print "Installing file input=",full_name,"output=",output_name
shutil.copy(full_name,output_name)
print "Return value: v=",r
sys.exit(r)
cmake/build-system/csharp/axl/axl2cc.py
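For orientation, a small self-contained sketch of how the option parser defined above maps a typical command line; the output path, header path, and MyModule.axl file name are hypothetical.

# Illustration only: mirrors the optparse setup in axl2cc.py with made-up arguments.
import optparse
parser = optparse.OptionParser(usage="%prog [-i header] [-o output_path] axlfile")
parser.add_option("-i", "--header-path", type="string", dest="header_path", help="header sub path")
parser.add_option("-o", "--output-path", type="string", dest="output_path", help="path to write output files")
parser.add_option("-c", "--copy", action="store_true", dest="do_copy", help="true if installing in share path")
options, args = parser.parse_args(["-o", "build/axl", "-i", "MyComponent", "-c", "MyModule.axl"])
assert options.output_path == "build/axl"
assert options.header_path == "MyComponent"
assert options.do_copy is True
assert args == ["MyModule.axl"]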
import sys
import time
import boto3
import logging
import kraken.kubernetes.client as kubecli
import kraken.node_actions.common_node_functions as nodeaction
from kraken.node_actions.abstract_node_scenarios import abstract_node_scenarios
class AWS:
def __init__(self):
self.boto_client = boto3.client("ec2")
self.boto_instance = boto3.resource("ec2").Instance("id")
# Get the instance ID of the node
def get_instance_id(self, node):
return self.boto_client.describe_instances(Filters=[{"Name": "private-dns-name", "Values": [node]}])[
"Reservations"
][0]["Instances"][0]["InstanceId"]
# Start the node instance
def start_instances(self, instance_id):
try:
self.boto_client.start_instances(InstanceIds=[instance_id])
logging.info("EC2 instance: " + str(instance_id) + " started")
except Exception as e:
logging.error(
"Failed to start node instance %s. Encountered following " "exception: %s." % (instance_id, e)
)
sys.exit(1)
# Stop the node instance
def stop_instances(self, instance_id):
try:
self.boto_client.stop_instances(InstanceIds=[instance_id])
logging.info("EC2 instance: " + str(instance_id) + " stopped")
except Exception as e:
logging.error("Failed to stop node instance %s. Encountered following " "exception: %s." % (instance_id, e))
sys.exit(1)
# Terminate the node instance
def terminate_instances(self, instance_id):
try:
self.boto_client.terminate_instances(InstanceIds=[instance_id])
logging.info("EC2 instance: " + str(instance_id) + " terminated")
except Exception as e:
logging.error(
"Failed to terminate node instance %s. Encountered following " "exception: %s." % (instance_id, e)
)
sys.exit(1)
# Reboot the node instance
def reboot_instances(self, instance_id):
try:
self.boto_client.reboot_instances(InstanceIds=[instance_id])
logging.info("EC2 instance " + str(instance_id) + " rebooted")
except Exception as e:
logging.error(
"Failed to reboot node instance %s. Encountered following " "exception: %s." % (instance_id, e)
)
sys.exit(1)
# Below functions poll EC2.Client.describe_instances() every 15 seconds
# until a successful state is reached. An error is returned after 40 failed checks
# Setting timeout for consistency with other cloud functions
# Wait until the node instance is running
def wait_until_running(self, instance_id, timeout=600):
try:
self.boto_instance.wait_until_running(InstanceIds=[instance_id])
return True
except Exception as e:
logging.error("Failed to get status waiting for %s to be running %s" % (instance_id, e))
return False
# Wait until the node instance is stopped
def wait_until_stopped(self, instance_id, timeout=600):
try:
self.boto_instance.wait_until_stopped(InstanceIds=[instance_id])
return True
except Exception as e:
logging.error("Failed to get status waiting for %s to be stopped %s" % (instance_id, e))
return False
# Wait until the node instance is terminated
def wait_until_terminated(self, instance_id, timeout=600):
try:
self.boto_instance.wait_until_terminated(InstanceIds=[instance_id])
return True
except Exception as e:
logging.error("Failed to get status waiting for %s to be terminated %s" % (instance_id, e))
return False
# Creates a deny network acl and returns the id
def create_default_network_acl(self, vpc_id):
try:
logging.info("Trying to create a default deny network acl")
response = self.boto_client.create_network_acl(VpcId=vpc_id)
acl_id = response["NetworkAcl"]["NetworkAclId"]
logging.info("Created a network acl, id=%s" % acl_id)
except Exception as e:
            logging.error(
                "Failed to create the default network_acl: %s. "
                "Make sure you have the aws cli configured on the host and set for the region of your vpc/subnet" % (e)
)
sys.exit(1)
return acl_id
# Replace network acl association
def replace_network_acl_association(self, association_id, acl_id):
try:
logging.info("Replacing the network acl associated with the subnet")
status = self.boto_client.replace_network_acl_association(AssociationId=association_id, NetworkAclId=acl_id)
logging.info(status)
new_association_id = status["NewAssociationId"]
except Exception as e:
logging.error("Failed to replace network acl association: %s" % (e))
sys.exit(1)
return new_association_id
# Describe network acl
def describe_network_acls(self, vpc_id, subnet_id):
try:
response = self.boto_client.describe_network_acls(
Filters=[
{"Name": "vpc-id", "Values": [vpc_id]},
{"Name": "association.subnet-id", "Values": [subnet_id]},
]
)
except Exception as e:
            logging.error(
                "Failed to describe network acl: %s. "
                "Make sure you have the aws cli configured on the host and set for the region of your vpc/subnet" % (e)
)
sys.exit(1)
associations = response["NetworkAcls"][0]["Associations"]
# grab the current network_acl in use
original_acl_id = response["NetworkAcls"][0]["Associations"][0]["NetworkAclId"]
return associations, original_acl_id
# Delete network acl
def delete_network_acl(self, acl_id):
try:
logging.info("Deleting the network acl: %s" % (acl_id))
self.boto_client.delete_network_acl(NetworkAclId=acl_id)
except Exception as e:
            logging.error(
                "Failed to delete network_acl %s: %s. "
                "Make sure you have the aws cli configured on the host and set for the region of your vpc/subnet"
% (acl_id, e)
)
sys.exit(1)
class aws_node_scenarios(abstract_node_scenarios):
def __init__(self):
self.aws = AWS()
# Node scenario to start the node
def node_start_scenario(self, instance_kill_count, node, timeout):
for _ in range(instance_kill_count):
try:
logging.info("Starting node_start_scenario injection")
instance_id = self.aws.get_instance_id(node)
logging.info("Starting the node %s with instance ID: %s " % (node, instance_id))
self.aws.start_instances(instance_id)
self.aws.wait_until_running(instance_id)
nodeaction.wait_for_ready_status(node, timeout)
logging.info("Node with instance ID: %s is in running state" % (instance_id))
logging.info("node_start_scenario has been successfully injected!")
except Exception as e:
logging.error(
"Failed to start node instance. Encountered following " "exception: %s. Test Failed" % (e)
)
logging.error("node_start_scenario injection failed!")
sys.exit(1)
# Node scenario to stop the node
def node_stop_scenario(self, instance_kill_count, node, timeout):
for _ in range(instance_kill_count):
try:
logging.info("Starting node_stop_scenario injection")
instance_id = self.aws.get_instance_id(node)
logging.info("Stopping the node %s with instance ID: %s " % (node, instance_id))
self.aws.stop_instances(instance_id)
self.aws.wait_until_stopped(instance_id)
logging.info("Node with instance ID: %s is in stopped state" % (instance_id))
nodeaction.wait_for_unknown_status(node, timeout)
except Exception as e:
logging.error("Failed to stop node instance. Encountered following exception: %s. " "Test Failed" % (e))
logging.error("node_stop_scenario injection failed!")
sys.exit(1)
# Node scenario to terminate the node
def node_termination_scenario(self, instance_kill_count, node, timeout):
for _ in range(instance_kill_count):
try:
logging.info("Starting node_termination_scenario injection")
instance_id = self.aws.get_instance_id(node)
logging.info("Terminating the node %s with instance ID: %s " % (node, instance_id))
self.aws.terminate_instances(instance_id)
self.aws.wait_until_terminated(instance_id)
for _ in range(timeout):
if node not in kubecli.list_nodes():
break
time.sleep(1)
if node in kubecli.list_nodes():
raise Exception("Node could not be terminated")
logging.info("Node with instance ID: %s has been terminated" % (instance_id))
logging.info("node_termination_scenario has been successfuly injected!")
except Exception as e:
logging.error(
"Failed to terminate node instance. Encountered following exception:" " %s. Test Failed" % (e)
)
logging.error("node_termination_scenario injection failed!")
sys.exit(1)
# Node scenario to reboot the node
def node_reboot_scenario(self, instance_kill_count, node, timeout):
for _ in range(instance_kill_count):
try:
logging.info("Starting node_reboot_scenario injection" + str(node))
instance_id = self.aws.get_instance_id(node)
logging.info("Rebooting the node %s with instance ID: %s " % (node, instance_id))
self.aws.reboot_instances(instance_id)
nodeaction.wait_for_unknown_status(node, timeout)
nodeaction.wait_for_ready_status(node, timeout)
logging.info("Node with instance ID: %s has been rebooted" % (instance_id))
logging.info("node_reboot_scenario has been successfuly injected!")
except Exception as e:
logging.error(
"Failed to reboot node instance. Encountered following exception:" " %s. Test Failed" % (e)
)
logging.error("node_reboot_scenario injection failed!")
sys.exit(1)
kraken/node_actions/aws_node_scenarios.py
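A minimal usage sketch, assuming AWS credentials and a default region are already configured for boto3 and that the node name passed in is the node's private DNS name (which is what get_instance_id filters on); the node name and timeout below are placeholders.

# Sketch only: requires valid AWS credentials/region and a reachable cluster.
# node_name is a placeholder for a worker node's private DNS name.
node_name = "ip-10-0-1-23.ec2.internal"
scenarios = aws_node_scenarios()
# Stop the backing EC2 instance once, then start it again, waiting up to 300 s each time.
scenarios.node_stop_scenario(instance_kill_count=1, node=node_name, timeout=300)
scenarios.node_start_scenario(instance_kill_count=1, node=node_name, timeout=300)
# The lower-level helpers remain available on the embedded AWS() client, e.g.:
# instance_id = scenarios.aws.get_instance_id(node_name)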
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
import numpy as np
import SUAVE
from SUAVE.Core import Units
from SUAVE.Methods.Propulsion.turbofan_sizing import turbofan_sizing
from SUAVE.Methods.Geometry.Two_Dimensional.Planform import segment_properties
from copy import deepcopy
# ----------------------------------------------------------------------
# Define the Vehicle
# ----------------------------------------------------------------------
def vehicle_setup():
# ------------------------------------------------------------------
# Initialize the Vehicle
# ------------------------------------------------------------------
vehicle = SUAVE.Vehicle()
vehicle.tag = 'Boeing_737800'
# ------------------------------------------------------------------
# Vehicle-level Properties
# ------------------------------------------------------------------
# mass properties
vehicle.mass_properties.max_takeoff = 79015.8 # kg
vehicle.mass_properties.takeoff = 79015.8 # kg
vehicle.mass_properties.operating_empty = 62746.4 # kg
vehicle.mass_properties.takeoff = 79015.8 # kg
vehicle.mass_properties.max_zero_fuel = 62732.0 # kg
vehicle.mass_properties.cargo = 10000. * Units.kilogram
vehicle.mass_properties.center_of_gravity = [[ 15.30987849, 0. , -0.48023939]]
vehicle.mass_properties.moments_of_inertia.tensor = [[3173074.17, 0 , 28752.77565],[0 , 3019041.443, 0],[0, 0, 5730017.433]] # estimated, not correct
vehicle.design_mach_number = 0.78
vehicle.design_range = 3582 * Units.miles
vehicle.design_cruise_alt = 35000.0 * Units.ft
# envelope properties
vehicle.envelope.ultimate_load = 3.75
vehicle.envelope.limit_load = 1.5
# basic parameters
vehicle.reference_area = 124.862
vehicle.passengers = 170
vehicle.systems.control = "fully powered"
vehicle.systems.accessories = "medium range"
# ------------------------------------------------------------------
# Main Wing
# ------------------------------------------------------------------
wing = SUAVE.Components.Wings.Main_Wing()
wing.tag = 'main_wing'
wing.aspect_ratio = 10.18
wing.sweeps.quarter_chord = 25 * Units.deg
wing.thickness_to_chord = 0.1
wing.taper = 0.1
wing.spans.projected = 34.32
wing.chords.root = 7.760 * Units.meter
wing.chords.tip = 0.782 * Units.meter
wing.chords.mean_aerodynamic = 4.235 * Units.meter
wing.areas.reference = 124.862
wing.areas.wetted = 225.08
wing.twists.root = 4.0 * Units.degrees
wing.twists.tip = 0.0 * Units.degrees
wing.origin = [[13.61,0,-0.93]]
wing.aerodynamic_center = [0,0,0]
wing.vertical = False
wing.symmetric = True
wing.high_lift = True
wing.dynamic_pressure_ratio = 1.0
# Wing Segments
root_airfoil = SUAVE.Components.Airfoils.Airfoil()
root_airfoil.coordinate_file = '../Vehicles/Airfoils/B737a.txt'
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'Root'
segment.percent_span_location = 0.0
segment.twist = 4. * Units.deg
segment.root_chord_percent = 1.
segment.thickness_to_chord = 0.1
segment.dihedral_outboard = 2.5 * Units.degrees
segment.sweeps.quarter_chord = 28.225 * Units.degrees
segment.thickness_to_chord = .1
segment.append_airfoil(root_airfoil)
wing.append_segment(segment)
yehudi_airfoil = SUAVE.Components.Airfoils.Airfoil()
yehudi_airfoil.coordinate_file = '../Vehicles/Airfoils/B737b.txt'
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'Yehudi'
segment.percent_span_location = 0.324
segment.twist = 0.047193 * Units.deg
segment.root_chord_percent = 0.5
segment.thickness_to_chord = 0.1
segment.dihedral_outboard = 5.5 * Units.degrees
segment.sweeps.quarter_chord = 25. * Units.degrees
segment.thickness_to_chord = .1
segment.append_airfoil(yehudi_airfoil)
wing.append_segment(segment)
mid_airfoil = SUAVE.Components.Airfoils.Airfoil()
mid_airfoil.coordinate_file = '../Vehicles/Airfoils/B737c.txt'
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'Section_2'
segment.percent_span_location = 0.963
segment.twist = 0.00258 * Units.deg
segment.root_chord_percent = 0.220
segment.thickness_to_chord = 0.1
segment.dihedral_outboard = 5.5 * Units.degrees
segment.sweeps.quarter_chord = 56.75 * Units.degrees
segment.thickness_to_chord = .1
segment.append_airfoil(mid_airfoil)
wing.append_segment(segment)
tip_airfoil = SUAVE.Components.Airfoils.Airfoil()
tip_airfoil.coordinate_file = '../Vehicles/Airfoils/B737d.txt'
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'Tip'
segment.percent_span_location = 1.
segment.twist = 0. * Units.degrees
segment.root_chord_percent = 0.10077
segment.thickness_to_chord = 0.1
segment.dihedral_outboard = 0.
segment.sweeps.quarter_chord = 0.
segment.thickness_to_chord = .1
segment.append_airfoil(tip_airfoil)
wing.append_segment(segment)
# Fill out more segment properties automatically
wing = segment_properties(wing)
# control surfaces -------------------------------------------
slat = SUAVE.Components.Wings.Control_Surfaces.Slat()
slat.tag = 'slat'
slat.span_fraction_start = 0.2
slat.span_fraction_end = 0.963
slat.deflection = 0.0 * Units.degrees
slat.chord_fraction = 0.075
wing.append_control_surface(slat)
flap = SUAVE.Components.Wings.Control_Surfaces.Flap()
flap.tag = 'flap'
flap.span_fraction_start = 0.2
flap.span_fraction_end = 0.7
flap.deflection = 0.0 * Units.degrees
flap.configuration_type = 'double_slotted'
flap.chord_fraction = 0.30
wing.append_control_surface(flap)
aileron = SUAVE.Components.Wings.Control_Surfaces.Aileron()
aileron.tag = 'aileron'
aileron.span_fraction_start = 0.7
aileron.span_fraction_end = 0.963
aileron.deflection = 0.0 * Units.degrees
aileron.chord_fraction = 0.16
wing.append_control_surface(aileron)
# add to vehicle
vehicle.append_component(wing)
# ------------------------------------------------------------------
# Horizontal Stabilizer
# ------------------------------------------------------------------
wing = SUAVE.Components.Wings.Horizontal_Tail()
wing.tag = 'horizontal_stabilizer'
wing.aspect_ratio = 4.99
wing.sweeps.quarter_chord = 28.2250 * Units.deg
wing.thickness_to_chord = 0.08
wing.taper = 0.3333
wing.spans.projected = 14.4
wing.chords.root = 4.2731
wing.chords.tip = 1.4243
wing.chords.mean_aerodynamic = 8.0
wing.areas.reference = 41.49
wing.areas.exposed = 59.354 # Exposed area of the horizontal tail
wing.areas.wetted = 71.81 # Wetted area of the horizontal tail
wing.twists.root = 3.0 * Units.degrees
wing.twists.tip = 3.0 * Units.degrees
wing.origin = [[33.02,0,1.466]]
wing.aerodynamic_center = [0,0,0]
wing.vertical = False
wing.symmetric = True
wing.dynamic_pressure_ratio = 0.9
# Wing Segments
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'root_segment'
segment.percent_span_location = 0.0
segment.twist = 0. * Units.deg
segment.root_chord_percent = 1.0
segment.dihedral_outboard = 8.63 * Units.degrees
segment.sweeps.quarter_chord = 28.2250 * Units.degrees
segment.thickness_to_chord = .1
wing.append_segment(segment)
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'tip_segment'
segment.percent_span_location = 1.
segment.twist = 0. * Units.deg
segment.root_chord_percent = 0.3333
segment.dihedral_outboard = 0 * Units.degrees
segment.sweeps.quarter_chord = 0 * Units.degrees
segment.thickness_to_chord = .1
wing.append_segment(segment)
# Fill out more segment properties automatically
wing = segment_properties(wing)
# control surfaces -------------------------------------------
elevator = SUAVE.Components.Wings.Control_Surfaces.Elevator()
elevator.tag = 'elevator'
elevator.span_fraction_start = 0.09
elevator.span_fraction_end = 0.92
elevator.deflection = 0.0 * Units.deg
elevator.chord_fraction = 0.3
wing.append_control_surface(elevator)
# add to vehicle
vehicle.append_component(wing)
# ------------------------------------------------------------------
# Vertical Stabilizer
# ------------------------------------------------------------------
wing = SUAVE.Components.Wings.Vertical_Tail()
wing.tag = 'vertical_stabilizer'
wing.aspect_ratio = 1.98865
wing.sweeps.quarter_chord = 31.2 * Units.deg
wing.thickness_to_chord = 0.08
wing.taper = 0.1183
wing.spans.projected = 8.33
wing.total_length = wing.spans.projected
wing.chords.root = 10.1
wing.chords.tip = 1.20
wing.chords.mean_aerodynamic = 4.0
wing.areas.reference = 34.89
wing.areas.wetted = 57.25
wing.twists.root = 0.0 * Units.degrees
wing.twists.tip = 0.0 * Units.degrees
wing.origin = [[26.944,0,1.54]]
wing.aerodynamic_center = [0,0,0]
wing.vertical = True
wing.symmetric = False
wing.t_tail = False
wing.dynamic_pressure_ratio = 1.0
# Wing Segments
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'root'
segment.percent_span_location = 0.0
segment.twist = 0. * Units.deg
segment.root_chord_percent = 1.
segment.dihedral_outboard = 0 * Units.degrees
segment.sweeps.quarter_chord = 61.485 * Units.degrees
segment.thickness_to_chord = .1
wing.append_segment(segment)
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'segment_1'
segment.percent_span_location = 0.2962
segment.twist = 0. * Units.deg
segment.root_chord_percent = 0.45
segment.dihedral_outboard = 0. * Units.degrees
segment.sweeps.quarter_chord = 31.2 * Units.degrees
segment.thickness_to_chord = .1
wing.append_segment(segment)
segment = SUAVE.Components.Wings.Segment()
segment.tag = 'segment_2'
segment.percent_span_location = 1.0
segment.twist = 0. * Units.deg
segment.root_chord_percent = 0.1183
segment.dihedral_outboard = 0.0 * Units.degrees
segment.sweeps.quarter_chord = 0.0
segment.thickness_to_chord = .1
wing.append_segment(segment)
# Fill out more segment properties automatically
wing = segment_properties(wing)
# add to vehicle
vehicle.append_component(wing)
# ------------------------------------------------------------------
# Fuselage
# ------------------------------------------------------------------
fuselage = SUAVE.Components.Fuselages.Fuselage()
fuselage.tag = 'fuselage'
fuselage.number_coach_seats = vehicle.passengers
fuselage.seats_abreast = 6
fuselage.seat_pitch = 31. * Units.inches
fuselage.fineness.nose = 1.6
fuselage.fineness.tail = 2.
fuselage.lengths.nose = 6.4
fuselage.lengths.tail = 8.0
fuselage.lengths.cabin = 28.85
fuselage.lengths.total = 38.02
fuselage.lengths.fore_space = 6.
fuselage.lengths.aft_space = 5.
fuselage.width = 3.74
fuselage.heights.maximum = 3.74
fuselage.heights.at_quarter_length = 3.74
fuselage.heights.at_three_quarters_length = 3.65
fuselage.heights.at_wing_root_quarter_chord = 3.74
fuselage.areas.side_projected = 142.1948
fuselage.areas.wetted = 385.51
fuselage.areas.front_projected = 12.57
fuselage.effective_diameter = 3.74
fuselage.differential_pressure = 5.0e4 * Units.pascal # Maximum differential pressure
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_0'
segment.percent_x_location = 0.0000
segment.percent_z_location = -0.00144
segment.height = 0.0100
segment.width = 0.0100
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_1'
segment.percent_x_location = 0.00576
segment.percent_z_location = -0.00144
segment.height = 0.7500
segment.width = 0.6500
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_2'
segment.percent_x_location = 0.02017
segment.percent_z_location = 0.00000
segment.height = 1.52783
segment.width = 1.20043
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_3'
segment.percent_x_location = 0.03170
segment.percent_z_location = 0.00000
segment.height = 1.96435
segment.width = 1.52783
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_4'
segment.percent_x_location = 0.04899
segment.percent_z_location = 0.00431
segment.height = 2.72826
segment.width = 1.96435
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_5'
segment.percent_x_location = 0.07781
segment.percent_z_location = 0.00861
segment.height = 3.49217
segment.width = 2.61913
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_6'
segment.percent_x_location = 0.10375
segment.percent_z_location = 0.01005
segment.height = 3.70130
segment.width = 3.05565
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_7'
segment.percent_x_location = 0.16427
segment.percent_z_location = 0.01148
segment.height = 3.92870
segment.width = 3.71043
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_8'
segment.percent_x_location = 0.22478
segment.percent_z_location = 0.01148
segment.height = 3.92870
segment.width = 3.92870
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_9'
segment.percent_x_location = 0.69164
segment.percent_z_location = 0.01292
segment.height = 3.81957
segment.width = 3.81957
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_10'
segment.percent_x_location = 0.71758
segment.percent_z_location = 0.01292
segment.height = 3.81957
segment.width = 3.81957
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_11'
segment.percent_x_location = 0.78098
segment.percent_z_location = 0.01722
segment.height = 3.49217
segment.width = 3.71043
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_12'
segment.percent_x_location = 0.85303
segment.percent_z_location = 0.02296
segment.height = 3.05565
segment.width = 3.16478
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_13'
segment.percent_x_location = 0.91931
segment.percent_z_location = 0.03157
segment.height = 2.40087
segment.width = 1.96435
fuselage.Segments.append(segment)
# Segment
segment = SUAVE.Components.Lofted_Body_Segment.Segment()
segment.tag = 'segment_14'
segment.percent_x_location = 1.00
segment.percent_z_location = 0.04593
segment.height = 1.09130
segment.width = 0.21826
fuselage.Segments.append(segment)
# add to vehicle
vehicle.append_component(fuselage)
# ------------------------------------------------------------------
# Nacelles
# ------------------------------------------------------------------
nacelle = SUAVE.Components.Nacelles.Nacelle()
nacelle.tag = 'nacelle_1'
nacelle.length = 2.71
nacelle.inlet_diameter = 1.90
nacelle.diameter = 2.05
nacelle.areas.wetted = 1.1*np.pi*nacelle.diameter*nacelle.length
nacelle.origin = [[13.72, -4.86,-1.9]]
nacelle.flow_through = True
nacelle_airfoil = SUAVE.Components.Airfoils.Airfoil()
nacelle_airfoil.naca_4_series_airfoil = '2410'
nacelle.append_airfoil(nacelle_airfoil)
nacelle_2 = deepcopy(nacelle)
nacelle_2.tag = 'nacelle_2'
nacelle_2.origin = [[13.72, 4.86,-1.9]]
vehicle.append_component(nacelle)
vehicle.append_component(nacelle_2)
# ------------------------------------------------------------------
# Turbofan Network
# ------------------------------------------------------------------
#instantiate the gas turbine network
turbofan = SUAVE.Components.Energy.Networks.Turbofan()
turbofan.tag = 'turbofan'
# setup
turbofan.number_of_engines = 2.0
turbofan.bypass_ratio = 5.4
turbofan.engine_length = 2.71
# This origin is overwritten by compute_component_centers_of_gravity(base,compute_propulsor_origin=True)
turbofan.origin = [[13.72, 4.86,-1.9],[13.72, -4.86,-1.9]]
# working fluid
turbofan.working_fluid = SUAVE.Attributes.Gases.Air()
# ------------------------------------------------------------------
# Component 1 - Ram
# to convert freestream static to stagnation quantities
# instantiate
ram = SUAVE.Components.Energy.Converters.Ram()
ram.tag = 'ram'
# add to the network
turbofan.append(ram)
# ------------------------------------------------------------------
# Component 2 - Inlet Nozzle
# instantiate
inlet_nozzle = SUAVE.Components.Energy.Converters.Compression_Nozzle()
inlet_nozzle.tag = 'inlet_nozzle'
# setup
inlet_nozzle.polytropic_efficiency = 0.98
inlet_nozzle.pressure_ratio = 0.98
# add to network
turbofan.append(inlet_nozzle)
# ------------------------------------------------------------------
# Component 3 - Low Pressure Compressor
# instantiate
compressor = SUAVE.Components.Energy.Converters.Compressor()
compressor.tag = 'low_pressure_compressor'
# setup
compressor.polytropic_efficiency = 0.91
compressor.pressure_ratio = 1.14
# add to network
turbofan.append(compressor)
# ------------------------------------------------------------------
# Component 4 - High Pressure Compressor
# instantiate
compressor = SUAVE.Components.Energy.Converters.Compressor()
compressor.tag = 'high_pressure_compressor'
# setup
compressor.polytropic_efficiency = 0.91
compressor.pressure_ratio = 13.415
# add to network
turbofan.append(compressor)
# ------------------------------------------------------------------
# Component 5 - Low Pressure Turbine
# instantiate
turbine = SUAVE.Components.Energy.Converters.Turbine()
turbine.tag='low_pressure_turbine'
# setup
turbine.mechanical_efficiency = 0.99
turbine.polytropic_efficiency = 0.93
# add to network
turbofan.append(turbine)
# ------------------------------------------------------------------
# Component 6 - High Pressure Turbine
# instantiate
turbine = SUAVE.Components.Energy.Converters.Turbine()
turbine.tag='high_pressure_turbine'
# setup
turbine.mechanical_efficiency = 0.99
turbine.polytropic_efficiency = 0.93
# add to network
turbofan.append(turbine)
# ------------------------------------------------------------------
# Component 7 - Combustor
# instantiate
combustor = SUAVE.Components.Energy.Converters.Combustor()
combustor.tag = 'combustor'
# setup
combustor.efficiency = 0.99
combustor.alphac = 1.0
combustor.turbine_inlet_temperature = 1450
combustor.pressure_ratio = 0.95
combustor.fuel_data = SUAVE.Attributes.Propellants.Jet_A()
# add to network
turbofan.append(combustor)
# ------------------------------------------------------------------
# Component 8 - Core Nozzle
# instantiate
nozzle = SUAVE.Components.Energy.Converters.Expansion_Nozzle()
nozzle.tag = 'core_nozzle'
# setup
nozzle.polytropic_efficiency = 0.95
nozzle.pressure_ratio = 0.99
# add to network
turbofan.append(nozzle)
# ------------------------------------------------------------------
# Component 9 - Fan Nozzle
# instantiate
nozzle = SUAVE.Components.Energy.Converters.Expansion_Nozzle()
nozzle.tag = 'fan_nozzle'
# setup
nozzle.polytropic_efficiency = 0.95
nozzle.pressure_ratio = 0.99
# add to network
turbofan.append(nozzle)
# ------------------------------------------------------------------
# Component 10 - Fan
# instantiate
fan = SUAVE.Components.Energy.Converters.Fan()
fan.tag = 'fan'
# setup
fan.polytropic_efficiency = 0.93
fan.pressure_ratio = 1.7
# add to network
turbofan.append(fan)
# ------------------------------------------------------------------
#Component 10 : thrust (to compute the thrust)
thrust = SUAVE.Components.Energy.Processes.Thrust()
thrust.tag ='compute_thrust'
#total design thrust (includes all the engines)
thrust.total_design = 2*24000. * Units.N #Newtons
#design sizing conditions
altitude = 35000.0*Units.ft
mach_number = 0.78
isa_deviation = 0.
#Engine setup for noise module
# add to network
turbofan.thrust = thrust
turbofan.core_nozzle_diameter = 0.92
turbofan.fan_nozzle_diameter = 1.659
    turbofan.engine_height = 0.5 #Engine centerline height above the ground plane
    turbofan.exa = 1 #distance from fan face to fan exit / fan diameter
    turbofan.plug_diameter = 0.1 #diameter of the engine plug
turbofan.geometry_xe = 1. # Geometry information for the installation effects function
turbofan.geometry_ye = 1. # Geometry information for the installation effects function
turbofan.geometry_Ce = 2. # Geometry information for the installation effects function
#size the turbofan
turbofan_sizing(turbofan,mach_number,altitude)
# add gas turbine network turbofan to the vehicle
vehicle.append_component(turbofan)
# ------------------------------------------------------------------
# Fuel
# ------------------------------------------------------------------
fuel = SUAVE.Components.Physical_Component()
vehicle.fuel = fuel
fuel.mass_properties.mass = vehicle.mass_properties.max_takeoff-vehicle.mass_properties.max_fuel
fuel.origin = vehicle.wings.main_wing.mass_properties.center_of_gravity
fuel.mass_properties.center_of_gravity= vehicle.wings.main_wing.aerodynamic_center
# ------------------------------------------------------------------
# Landing Gear
# ------------------------------------------------------------------
landing_gear = SUAVE.Components.Landing_Gear.Landing_Gear()
landing_gear.tag = "main_landing_gear"
landing_gear.main_tire_diameter = 1.12000 * Units.m
landing_gear.nose_tire_diameter = 0.6858 * Units.m
landing_gear.main_strut_length = 1.8 * Units.m
landing_gear.nose_strut_length = 1.3 * Units.m
    landing_gear.main_units = 1 #number of main landing gear units
landing_gear.nose_units = 1 #number of nose landing gear
landing_gear.main_wheels = 2 #number of wheels on the main landing gear
landing_gear.nose_wheels = 2 #number of wheels on the nose landing gear
vehicle.landing_gear = landing_gear
# ------------------------------------------------------------------
# Vehicle Definition Complete
# ------------------------------------------------------------------
return vehicle
# ----------------------------------------------------------------------
# Define the Configurations
# ---------------------------------------------------------------------
def configs_setup(vehicle):
# ------------------------------------------------------------------
# Initialize Configurations
# ------------------------------------------------------------------
configs = SUAVE.Components.Configs.Config.Container()
base_config = SUAVE.Components.Configs.Config(vehicle)
base_config.tag = 'base'
base_config.landing_gear.gear_condition = 'up'
configs.append(base_config)
# ------------------------------------------------------------------
# Cruise Configuration
# ------------------------------------------------------------------
config = SUAVE.Components.Configs.Config(base_config)
config.tag = 'cruise'
configs.append(config)
config.wings['main_wing'].control_surfaces.flap.deflection = 0. * Units.deg
config.wings['main_wing'].control_surfaces.slat.deflection = 0. * Units.deg
# ------------------------------------------------------------------
# Takeoff Configuration
# ------------------------------------------------------------------
config = SUAVE.Components.Configs.Config(base_config)
config.tag = 'takeoff'
config.wings['main_wing'].control_surfaces.flap.deflection = 20. * Units.deg
config.wings['main_wing'].control_surfaces.slat.deflection = 25. * Units.deg
#Noise input for the landing gear
config.landing_gear.gear_condition = 'up'
config.output_filename = 'Flyover_'
config.networks['turbofan'].fan.rotation = 3470. #N1 speed
config.networks['turbofan'].fan_nozzle.noise_speed = 315.
config.networks['turbofan'].core_nozzle.noise_speed = 415.
configs.append(config)
# ------------------------------------------------------------------
# Cutback Configuration
# ------------------------------------------------------------------
config = SUAVE.Components.Configs.Config(base_config)
config.tag = 'cutback'
config.wings['main_wing'].control_surfaces.flap.deflection = 20. * Units.deg
config.wings['main_wing'].control_surfaces.slat.deflection = 20. * Units.deg
#Noise input for the landing gear
config.landing_gear.gear_condition = 'up'
config.output_filename = 'Cutback_'
config.networks['turbofan'].fan.rotation = 2780. #N1 speed
config.networks['turbofan'].fan_nozzle.noise_speed = 210.
config.networks['turbofan'].core_nozzle.noise_speed = 360.
configs.append(config)
# ------------------------------------------------------------------
# Landing Configuration
# ------------------------------------------------------------------
config = SUAVE.Components.Configs.Config(base_config)
config.tag = 'landing'
config.wings['main_wing'].control_surfaces.flap.deflection = 30. * Units.deg
config.wings['main_wing'].control_surfaces.slat.deflection = 25. * Units.deg
#Noise input for the landing gear
config.landing_gear.gear_condition = 'down'
config.output_filename = 'Approach_'
config.networks['turbofan'].fan.rotation = 2030. #N1 speed
config.networks['turbofan'].fan_nozzle.noise_speed = 109.3
config.networks['turbofan'].core_nozzle.noise_speed = 92.
configs.append(config)
# ------------------------------------------------------------------
# Short Field Takeoff Configuration
# ------------------------------------------------------------------
config = SUAVE.Components.Configs.Config(base_config)
config.tag = 'short_field_takeoff'
config.wings['main_wing'].control_surfaces.flap.deflection = 20. * Units.deg
config.wings['main_wing'].control_surfaces.slat.deflection = 20. * Units.deg
configs.append(config)
return configs
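# ----------------------------------------------------------------------
#   Example Usage (illustrative sketch, not part of the original script)
# ----------------------------------------------------------------------
# A minimal sketch of how the two functions above are typically combined:
# build the baseline vehicle once, then derive the per-condition
# configurations from it. The wrapper name full_setup() is hypothetical and
# only used here for illustration.
def full_setup():
    vehicle = vehicle_setup()           # Boeing 737-800 baseline definition
    configs = configs_setup(vehicle)    # cruise/takeoff/cutback/landing/short-field configs
    return vehicle, configs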
|
SUAVE/SUAVE-2.5.0/regression/scripts/Vehicles/Boeing_737.py
|
| 0.584271 | 0.254871 |
import logging
from newrelic_plugin_agent.plugins import base
LOGGER = logging.getLogger(__name__)
class Riak(base.JSONStatsPlugin):
DEFAULT_PATH = '/stats'
GUID = 'com.meetme.newrelic_riak_agent'
def add_datapoints(self, stats):
"""Add all of the data points for a node
        :param dict stats: the node's stats as returned by Riak's /stats endpoint
"""
self.add_gauge_value('Delays/Convergence', 'us',
stats.get('converge_delay_total', 0),
min_val=stats.get('converge_delay_min', 0),
max_val=stats.get('converge_delay_max', 0))
self.add_gauge_value('Delays/Rebalance', 'us',
stats.get('rebalance_delay_total', 0),
min_val=stats.get('rebalance_delay_min', 0),
max_val=stats.get('rebalance_delay_max', 0))
self.add_gauge_value('FSM/Object Size/Mean', 'bytes',
stats.get('node_get_fsm_objsize_mean', 0))
self.add_gauge_value('FSM/Object Size/Median', 'bytes',
stats.get('node_get_fsm_objsize_median', 0))
self.add_gauge_value('FSM/Object Size/90th Percentile', 'bytes',
stats.get('node_get_fsm_objsize_90', 0))
self.add_gauge_value('FSM/Object Size/95th Percentile', 'bytes',
stats.get('node_get_fsm_objsize_95', 0))
self.add_gauge_value('FSM/Object Size/100th Percentile', 'bytes',
stats.get('node_get_fsm_objsize_100', 0))
self.add_gauge_value('FSM/Siblings/Mean', 'siblings',
stats.get('node_get_fsm_siblings_mean', 0))
        self.add_gauge_value('FSM/Siblings/Median', 'siblings',
                             stats.get('node_get_fsm_siblings_median', 0))
self.add_gauge_value('FSM/Siblings/90th Percentile', 'siblings',
stats.get('node_get_fsm_siblings_90', 0))
self.add_gauge_value('FSM/Siblings/95th Percentile', 'siblings',
stats.get('node_get_fsm_siblings_95', 0))
self.add_gauge_value('FSM/Siblings/100th Percentile', 'siblings',
stats.get('node_get_fsm_siblings_100', 0))
self.add_gauge_value('FSM/Time/Get/Mean', 'us',
stats.get('node_get_fsm_time_mean', 0))
self.add_gauge_value('FSM/Time/Get/Median', 'us',
                             stats.get('node_get_fsm_time_median', 0))
self.add_gauge_value('FSM/Time/Get/90th Percentile', 'us',
stats.get('node_get_fsm_time_90', 0))
self.add_gauge_value('FSM/Time/Get/95th Percentile', 'us',
stats.get('node_get_fsm_time_95', 0))
self.add_gauge_value('FSM/Time/Get/100th Percentile', 'us',
stats.get('node_get_fsm_time_100', 0))
self.add_gauge_value('FSM/Time/Put/Mean', 'us',
stats.get('node_put_fsm_time_mean', 0))
self.add_gauge_value('FSM/Time/Put/Median', 'us',
                             stats.get('node_put_fsm_time_median', 0))
self.add_gauge_value('FSM/Time/Put/90th Percentile', 'us',
stats.get('node_put_fsm_time_90', 0))
self.add_gauge_value('FSM/Time/Put/95th Percentile', 'us',
stats.get('node_put_fsm_time_95', 0))
self.add_gauge_value('FSM/Time/Put/100th Percentile', 'us',
stats.get('node_put_fsm_time_100', 0))
self.add_derive_value('Failures/Pre-commit', 'failures',
stats.get('precommit_fail', 0))
self.add_derive_value('Failures/Post-commit', 'failures',
stats.get('postcommit_fail', 0))
self.add_derive_value('Gossip/Ignored', 'gossip',
stats.get('ignored_gossip_total', 0))
self.add_derive_value('Gossip/Received', 'gossip',
stats.get('gossip_received', 0))
self.add_derive_value('Handoff Timeouts', '',
stats.get('handoff_timeouts', 0))
        self.add_gauge_value('Mappers/Executing', 'mappers',
stats.get('executing_mappers', 0))
self.add_gauge_value('Memory/Allocated', 'bytes',
stats.get('mem_allocated', 0))
self.add_gauge_value('Memory/Total', 'bytes',
stats.get('mem_total', 0))
self.add_gauge_value('Memory/Erlang/Atom/Allocated', 'bytes',
stats.get('memory_atom', 0))
self.add_gauge_value('Memory/Erlang/Atom/Used', 'bytes',
stats.get('memory_atom_used', 0))
self.add_gauge_value('Memory/Erlang/Binary', 'bytes',
stats.get('memory_binary', 0))
self.add_gauge_value('Memory/Erlang/Code', 'bytes',
stats.get('memory_code', 0))
self.add_gauge_value('Memory/Erlang/ETS', 'bytes',
stats.get('memory_ets', 0))
self.add_gauge_value('Memory/Erlang/Processes/Allocated', 'bytes',
stats.get('memory_processes', 0))
self.add_gauge_value('Memory/Erlang/Processes/Used', 'bytes',
stats.get('memory_processes_used', 0))
self.add_gauge_value('Memory/Erlang/System', 'bytes',
stats.get('memory_system', 0))
self.add_gauge_value('Memory/Erlang/Total', 'bytes',
stats.get('memory_total', 0))
self.add_gauge_value('Nodes/Connected', 'nodes',
len(stats.get('connected_nodes', list())))
self.add_gauge_value('Pipeline/Active', 'pipelines',
stats.get('pipeline_active', 0))
self.add_derive_value('Pipeline/Created', 'pipelines',
stats.get('pipeline_create_count', 0))
self.add_derive_value('Pipeline/Creation Errors', 'pipelines',
stats.get('pipeline_create_error_count', 0))
self.add_gauge_value('Processes/OS', 'processes',
stats.get('cpu_nprocs', 0))
self.add_gauge_value('Processes/Erlang', 'processes',
                             stats.get('sys_process_count', 0))
self.add_gauge_value('Protocol Buffer Connections', 'active',
stats.get('pbc_active', 0))
self.add_derive_value('Protocol Buffer Connections', 'total',
stats.get('pbc_connects_total', 0))
self.add_derive_value('Read Repairs', 'reads',
stats.get('read_repairs_total', 0))
self.add_derive_value('Requests/Gets', 'requests',
stats.get('node_gets_total', 0))
self.add_derive_value('Requests/Puts', 'requests',
stats.get('node_puts_total', 0))
self.add_derive_value('Requests/Redirected', 'requests',
stats.get('coord_redirs_total', 0))
self.add_gauge_value('Ring/Members', 'members',
len(stats.get('ring_members', list())))
self.add_gauge_value('Ring/Partitions', 'partitions',
stats.get('ring_num_partitions', 0))
        self.add_gauge_value('Ring/Size', 'partitions',
stats.get('ring_creation_size', 0))
self.add_derive_value('Ring/Reconciled', 'members',
stats.get('rings_reconciled_total', 0))
self.add_derive_value('VNodes/Gets', 'vnodes',
stats.get('vnode_gets_total', 0))
self.add_derive_value('VNodes/Puts', 'vnodes',
stats.get('vnode_puts_total', 0))
self.add_derive_value('VNodes/Index', 'deletes',
stats.get('vnode_index_deletes_total', 0))
self.add_derive_value('VNodes/Index', 'delete-postings',
stats.get('vnode_index_deletes_postings_total',
0))
self.add_derive_value('VNodes/Index', 'reads',
stats.get('vnode_index_reads_total', 0))
self.add_derive_value('VNodes/Index', 'writes',
stats.get('vnode_index_writes_total', 0))
self.add_derive_value('VNodes/Index', 'postings',
                              stats.get('vnode_index_writes_postings_total', 0))
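# Illustrative sketch (not part of the original plugin): the stats argument to
# add_datapoints() is the JSON document served by Riak's /stats endpoint,
# decoded into a dict. The keys below are taken from the lookups above; the
# values are made up purely for illustration.
EXAMPLE_STATS = {
    'node_gets_total': 1024,
    'node_puts_total': 512,
    'node_get_fsm_time_median': 350,
    'connected_nodes': ['riak@10.0.0.2', 'riak@10.0.0.3'],
    'ring_num_partitions': 64,
}
# A configured Riak plugin instance would then map these keys onto New Relic
# gauge and derive values via add_gauge_value()/add_derive_value().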
|
newrelic_plugin_agent/plugins/riak.py
|
| 0.558327 | 0.104112 |
from django.db import models
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from rest_framework.exceptions import NotAuthenticated
from baserow.core.user_files.models import UserFile
from .mixins import (
OrderableMixin,
PolymorphicContentTypeMixin,
CreatedAndUpdatedOnMixin,
TrashableModelMixin,
ParentGroupTrashableModelMixin,
)
from .exceptions import UserNotInGroup, UserInvalidGroupPermissionsError
__all__ = [
"Settings",
"Group",
"GroupUser",
"GroupInvitation",
"Application",
"TemplateCategory",
"Template",
"UserLogEntry",
"TrashEntry",
"UserFile",
]
User = get_user_model()
# The difference between an admin and member right now is that an admin has
# permissions to update, delete and manage the members of a group.
GROUP_USER_PERMISSION_ADMIN = "ADMIN"
GROUP_USER_PERMISSION_MEMBER = "MEMBER"
GROUP_USER_PERMISSION_CHOICES = (
(GROUP_USER_PERMISSION_ADMIN, "Admin"),
(GROUP_USER_PERMISSION_MEMBER, "Member"),
)
def get_default_application_content_type():
return ContentType.objects.get_for_model(Application)
class Settings(models.Model):
"""
The settings model represents the application wide settings that only admins can
change. This table can only contain a single row.
"""
allow_new_signups = models.BooleanField(
default=True,
help_text="Indicates whether new users can create a new account when signing "
"up.",
)
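# Illustrative sketch (not part of the original module): because the table is
# meant to hold a single row, one common pattern is to fetch that row and fall
# back to creating it with the model defaults. The helper name below is
# hypothetical and only uses standard Django ORM calls; Baserow's own handler
# logic may differ.
def _example_get_settings():
    settings = Settings.objects.first()
    if settings is None:
        settings = Settings.objects.create()
    return settings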
class Group(TrashableModelMixin, CreatedAndUpdatedOnMixin):
name = models.CharField(max_length=100)
users = models.ManyToManyField(User, through="GroupUser")
def application_set_including_trash(self):
"""
:return: The applications for this group including any trashed applications.
"""
return self.application_set(manager="objects_and_trash")
def has_template(self):
return self.template_set.all().exists()
def has_user(
self,
user,
permissions=None,
raise_error=False,
allow_if_template=False,
include_trash=False,
):
"""
Checks if the provided user belongs to the group.
:param user: The user that must be in the group.
:type user: User
:param permissions: One or multiple permissions can optionally be provided
and if so, the user must have one of those permissions.
:type permissions: None, str or list
:param raise_error: If True an error will be raised when the user does not
belong to the group or doesn't have the right permissions.
:type raise_error: bool
:param allow_if_template: If true and if the group is related to a template,
then True is always returned and no exception will be raised.
:type allow_if_template: bool
        :param include_trash: If true then the check also includes group users
            whose group has been trashed, instead of treating them as missing.
:type include_trash: bool
:raises UserNotInGroup: If the user does not belong to the group.
:raises UserInvalidGroupPermissionsError: If the user does belong to the group,
but doesn't have the right permissions.
:return: Indicates if the user belongs to the group.
:rtype: bool
"""
if permissions and not isinstance(permissions, list):
permissions = [permissions]
if allow_if_template and self.has_template():
return True
elif not bool(user and user.is_authenticated):
if raise_error:
raise NotAuthenticated()
else:
return False
if include_trash:
manager = GroupUser.objects_and_trash
else:
manager = GroupUser.objects
queryset = manager.filter(user_id=user.id, group_id=self.id)
if raise_error:
try:
group_user = queryset.get()
except GroupUser.DoesNotExist:
raise UserNotInGroup(user, self)
if permissions is not None and group_user.permissions not in permissions:
raise UserInvalidGroupPermissionsError(user, self, permissions)
else:
if permissions is not None:
queryset = queryset.filter(permissions__in=permissions)
return queryset.exists()
def __str__(self):
return f"<Group id={self.id}, name={self.name}>"
class GroupUser(
ParentGroupTrashableModelMixin,
CreatedAndUpdatedOnMixin,
OrderableMixin,
models.Model,
):
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text="The user that has access to the group.",
)
group = models.ForeignKey(
Group,
on_delete=models.CASCADE,
help_text="The group that the user has access to.",
)
order = models.PositiveIntegerField(
help_text="Unique order that the group has for the user."
)
permissions = models.CharField(
default=GROUP_USER_PERMISSION_MEMBER,
max_length=32,
choices=GROUP_USER_PERMISSION_CHOICES,
help_text="The permissions that the user has within the group.",
)
class Meta:
unique_together = [["user", "group"]]
ordering = ("order",)
@classmethod
def get_last_order(cls, user):
queryset = cls.objects.filter(user=user)
return cls.get_highest_order_of_queryset(queryset) + 1
class GroupInvitation(
ParentGroupTrashableModelMixin, CreatedAndUpdatedOnMixin, models.Model
):
group = models.ForeignKey(
Group,
on_delete=models.CASCADE,
help_text="The group that the user will get access to once the invitation is "
"accepted.",
)
invited_by = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text="The user that created the invitation.",
)
email = models.EmailField(
db_index=True,
help_text="The email address of the user that the invitation is meant for. "
"Only a user with that email address can accept it.",
)
permissions = models.CharField(
default=GROUP_USER_PERMISSION_MEMBER,
max_length=32,
choices=GROUP_USER_PERMISSION_CHOICES,
help_text="The permissions that the user is going to get within the group "
"after accepting the invitation.",
)
message = models.TextField(
blank=True,
help_text="An optional message that the invitor can provide. This will be "
"visible to the receiver of the invitation.",
)
class Meta:
ordering = ("id",)
class Application(
TrashableModelMixin,
CreatedAndUpdatedOnMixin,
OrderableMixin,
PolymorphicContentTypeMixin,
models.Model,
):
group = models.ForeignKey(Group, on_delete=models.CASCADE)
name = models.CharField(max_length=50)
order = models.PositiveIntegerField()
content_type = models.ForeignKey(
ContentType,
verbose_name="content type",
related_name="applications",
on_delete=models.SET(get_default_application_content_type),
)
class Meta:
ordering = ("order",)
@classmethod
def get_last_order(cls, group):
queryset = Application.objects.filter(group=group)
return cls.get_highest_order_of_queryset(queryset) + 1
class TemplateCategory(models.Model):
name = models.CharField(max_length=32)
class Meta:
ordering = ("name",)
class Template(models.Model):
name = models.CharField(max_length=64)
slug = models.SlugField(
help_text="The template slug that is used to match the template with the JSON "
"file name."
)
icon = models.CharField(
max_length=32,
help_text="The font awesome class name that can be used for displaying "
"purposes.",
)
categories = models.ManyToManyField(TemplateCategory, related_name="templates")
group = models.ForeignKey(
Group,
on_delete=models.SET_NULL,
null=True,
help_text="The group containing the applications related to the template. The "
"read endpoints related to that group are publicly accessible for "
"preview purposes.",
)
export_hash = models.CharField(
max_length=64,
blank=True,
help_text="The export hash that is used to compare if the exported group "
"applications have changed when syncing the templates.",
)
keywords = models.TextField(
default="",
blank=True,
help_text="Keywords related to the template that can be used for search.",
)
class Meta:
ordering = ("name",)
class UserLogEntry(models.Model):
actor = models.ForeignKey(User, on_delete=models.CASCADE)
action = models.CharField(max_length=20, choices=(("SIGNED_IN", "Signed in"),))
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = "timestamp"
ordering = ["-timestamp"]
class TrashEntry(models.Model):
"""
A TrashEntry is a record indicating that another model in Baserow has a trashed
row. When a user deletes certain things in Baserow they are not actually deleted
from the database, but instead marked as trashed. Trashed rows can be restored
or permanently deleted.
The other model must mixin the TrashableModelMixin and also have a corresponding
TrashableItemType registered specifying exactly how to delete and restore that
model.
"""
# The TrashableItemType.type of the item that is trashed.
trash_item_type = models.TextField()
# We need to also store the parent id as for some trashable items the
# trash_item_type and the trash_item_id is not unique as the items of that type
# could be spread over multiple tables with the same id.
parent_trash_item_id = models.PositiveIntegerField(null=True, blank=True)
# The actual id of the item that is trashed
trash_item_id = models.PositiveIntegerField()
# If the user who trashed something gets deleted we still wish to preserve this
# trash record as it is independent of if the user exists or not.
user_who_trashed = models.ForeignKey(
User, on_delete=models.SET_NULL, null=True, blank=True
)
# The group and application fields are used to group trash into separate "bins"
# which can be viewed and emptied independently of each other.
# The group the item that is trashed is found in, if the trashed item is the
# group itself then this should also be set to that trashed group.
group = models.ForeignKey(Group, on_delete=models.CASCADE)
# The application the item that is trashed is found in, if the trashed item is the
# application itself then this should also be set to that trashed application.
application = models.ForeignKey(
Application, on_delete=models.CASCADE, null=True, blank=True
)
# When set to true this trash entry will be picked up by a periodic job and the
# underlying item will be actually permanently deleted along with the entry.
should_be_permanently_deleted = models.BooleanField(default=False)
trashed_at = models.DateTimeField(auto_now_add=True)
# The name, name of the parent and any extra description are cached so lookups
# of trashed items are simple and do not require joining to many different tables
# to simply get these details.
name = models.TextField()
parent_name = models.TextField(null=True, blank=True)
extra_description = models.TextField(null=True, blank=True)
class Meta:
unique_together = ("trash_item_type", "parent_trash_item_id", "trash_item_id")
indexes = [
models.Index(
fields=["-trashed_at", "trash_item_type", "group", "application"]
)
]
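# Illustrative sketch (not part of the original module): what a trash record
# for a trashed application could look like, using only the fields declared
# above. The trash_item_type value and the helper name are assumptions for the
# example; in Baserow these details come from the registered TrashableItemType
# implementations.
def _example_trash_entry_for_application(application, user):
    return TrashEntry.objects.create(
        trash_item_type="application",   # assumed type name for the example
        trash_item_id=application.id,
        user_who_trashed=user,
        group=application.group,
        application=application,
        name=application.name,
    )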
|
backend/src/baserow/core/models.py
|
from django.db import models
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from rest_framework.exceptions import NotAuthenticated
from baserow.core.user_files.models import UserFile
from .mixins import (
OrderableMixin,
PolymorphicContentTypeMixin,
CreatedAndUpdatedOnMixin,
TrashableModelMixin,
ParentGroupTrashableModelMixin,
)
from .exceptions import UserNotInGroup, UserInvalidGroupPermissionsError
__all__ = [
"Settings",
"Group",
"GroupUser",
"GroupInvitation",
"Application",
"TemplateCategory",
"Template",
"UserLogEntry",
"TrashEntry",
"UserFile",
]
User = get_user_model()
# The difference between an admin and member right now is that an admin has
# permissions to update, delete and manage the members of a group.
GROUP_USER_PERMISSION_ADMIN = "ADMIN"
GROUP_USER_PERMISSION_MEMBER = "MEMBER"
GROUP_USER_PERMISSION_CHOICES = (
(GROUP_USER_PERMISSION_ADMIN, "Admin"),
(GROUP_USER_PERMISSION_MEMBER, "Member"),
)
def get_default_application_content_type():
return ContentType.objects.get_for_model(Application)
class Settings(models.Model):
"""
The settings model represents the application wide settings that only admins can
change. This table can only contain a single row.
"""
allow_new_signups = models.BooleanField(
default=True,
help_text="Indicates whether new users can create a new account when signing "
"up.",
)
class Group(TrashableModelMixin, CreatedAndUpdatedOnMixin):
name = models.CharField(max_length=100)
users = models.ManyToManyField(User, through="GroupUser")
def application_set_including_trash(self):
"""
:return: The applications for this group including any trashed applications.
"""
return self.application_set(manager="objects_and_trash")
def has_template(self):
return self.template_set.all().exists()
def has_user(
self,
user,
permissions=None,
raise_error=False,
allow_if_template=False,
include_trash=False,
):
"""
Checks if the provided user belongs to the group.
:param user: The user that must be in the group.
:type user: User
:param permissions: One or multiple permissions can optionally be provided
and if so, the user must have one of those permissions.
:type permissions: None, str or list
:param raise_error: If True an error will be raised when the user does not
belong to the group or doesn't have the right permissions.
:type raise_error: bool
:param allow_if_template: If true and if the group is related to a template,
then True is always returned and no exception will be raised.
:type allow_if_template: bool
:param include_trash: If true then also checks if the group has been trashed
instead of raising a DoesNotExist exception.
:type include_trash: bool
:raises UserNotInGroup: If the user does not belong to the group.
:raises UserInvalidGroupPermissionsError: If the user does belong to the group,
but doesn't have the right permissions.
:return: Indicates if the user belongs to the group.
:rtype: bool
"""
if permissions and not isinstance(permissions, list):
permissions = [permissions]
if allow_if_template and self.has_template():
return True
elif not bool(user and user.is_authenticated):
if raise_error:
raise NotAuthenticated()
else:
return False
if include_trash:
manager = GroupUser.objects_and_trash
else:
manager = GroupUser.objects
queryset = manager.filter(user_id=user.id, group_id=self.id)
if raise_error:
try:
group_user = queryset.get()
except GroupUser.DoesNotExist:
raise UserNotInGroup(user, self)
if permissions is not None and group_user.permissions not in permissions:
raise UserInvalidGroupPermissionsError(user, self, permissions)
else:
if permissions is not None:
queryset = queryset.filter(permissions__in=permissions)
return queryset.exists()
def __str__(self):
return f"<Group id={self.id}, name={self.name}>"
class GroupUser(
ParentGroupTrashableModelMixin,
CreatedAndUpdatedOnMixin,
OrderableMixin,
models.Model,
):
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text="The user that has access to the group.",
)
group = models.ForeignKey(
Group,
on_delete=models.CASCADE,
help_text="The group that the user has access to.",
)
order = models.PositiveIntegerField(
help_text="Unique order that the group has for the user."
)
permissions = models.CharField(
default=GROUP_USER_PERMISSION_MEMBER,
max_length=32,
choices=GROUP_USER_PERMISSION_CHOICES,
help_text="The permissions that the user has within the group.",
)
class Meta:
unique_together = [["user", "group"]]
ordering = ("order",)
@classmethod
def get_last_order(cls, user):
queryset = cls.objects.filter(user=user)
return cls.get_highest_order_of_queryset(queryset) + 1
class GroupInvitation(
ParentGroupTrashableModelMixin, CreatedAndUpdatedOnMixin, models.Model
):
group = models.ForeignKey(
Group,
on_delete=models.CASCADE,
help_text="The group that the user will get access to once the invitation is "
"accepted.",
)
invited_by = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text="The user that created the invitation.",
)
email = models.EmailField(
db_index=True,
help_text="The email address of the user that the invitation is meant for. "
"Only a user with that email address can accept it.",
)
permissions = models.CharField(
default=GROUP_USER_PERMISSION_MEMBER,
max_length=32,
choices=GROUP_USER_PERMISSION_CHOICES,
help_text="The permissions that the user is going to get within the group "
"after accepting the invitation.",
)
message = models.TextField(
blank=True,
help_text="An optional message that the invitor can provide. This will be "
"visible to the receiver of the invitation.",
)
class Meta:
ordering = ("id",)
class Application(
TrashableModelMixin,
CreatedAndUpdatedOnMixin,
OrderableMixin,
PolymorphicContentTypeMixin,
models.Model,
):
group = models.ForeignKey(Group, on_delete=models.CASCADE)
name = models.CharField(max_length=50)
order = models.PositiveIntegerField()
content_type = models.ForeignKey(
ContentType,
verbose_name="content type",
related_name="applications",
on_delete=models.SET(get_default_application_content_type),
)
class Meta:
ordering = ("order",)
@classmethod
def get_last_order(cls, group):
queryset = Application.objects.filter(group=group)
return cls.get_highest_order_of_queryset(queryset) + 1
class TemplateCategory(models.Model):
name = models.CharField(max_length=32)
class Meta:
ordering = ("name",)
class Template(models.Model):
name = models.CharField(max_length=64)
slug = models.SlugField(
help_text="The template slug that is used to match the template with the JSON "
"file name."
)
icon = models.CharField(
max_length=32,
help_text="The font awesome class name that can be used for displaying "
"purposes.",
)
categories = models.ManyToManyField(TemplateCategory, related_name="templates")
group = models.ForeignKey(
Group,
on_delete=models.SET_NULL,
null=True,
help_text="The group containing the applications related to the template. The "
"read endpoints related to that group are publicly accessible for "
"preview purposes.",
)
export_hash = models.CharField(
max_length=64,
blank=True,
help_text="The export hash that is used to compare if the exported group "
"applications have changed when syncing the templates.",
)
keywords = models.TextField(
default="",
blank=True,
help_text="Keywords related to the template that can be used for search.",
)
class Meta:
ordering = ("name",)
class UserLogEntry(models.Model):
actor = models.ForeignKey(User, on_delete=models.CASCADE)
action = models.CharField(max_length=20, choices=(("SIGNED_IN", "Signed in"),))
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = "timestamp"
ordering = ["-timestamp"]
class TrashEntry(models.Model):
"""
A TrashEntry is a record indicating that another model in Baserow has a trashed
row. When a user deletes certain things in Baserow they are not actually deleted
from the database, but instead marked as trashed. Trashed rows can be restored
or permanently deleted.
The other model must mixin the TrashableModelMixin and also have a corresponding
TrashableItemType registered specifying exactly how to delete and restore that
model.
"""
# The TrashableItemType.type of the item that is trashed.
trash_item_type = models.TextField()
# We need to also store the parent id as for some trashable items the
# trash_item_type and the trash_item_id is not unique as the items of that type
# could be spread over multiple tables with the same id.
parent_trash_item_id = models.PositiveIntegerField(null=True, blank=True)
# The actual id of the item that is trashed
trash_item_id = models.PositiveIntegerField()
# If the user who trashed something gets deleted we still wish to preserve this
# trash record as it is independent of if the user exists or not.
user_who_trashed = models.ForeignKey(
User, on_delete=models.SET_NULL, null=True, blank=True
)
# The group and application fields are used to group trash into separate "bins"
# which can be viewed and emptied independently of each other.
# The group the item that is trashed is found in, if the trashed item is the
# group itself then this should also be set to that trashed group.
group = models.ForeignKey(Group, on_delete=models.CASCADE)
# The application the item that is trashed is found in, if the trashed item is the
# application itself then this should also be set to that trashed application.
application = models.ForeignKey(
Application, on_delete=models.CASCADE, null=True, blank=True
)
# When set to true this trash entry will be picked up by a periodic job and the
# underlying item will be actually permanently deleted along with the entry.
should_be_permanently_deleted = models.BooleanField(default=False)
trashed_at = models.DateTimeField(auto_now_add=True)
# The name, name of the parent and any extra description are cached so lookups
# of trashed items are simple and do not require joining to many different tables
# to simply get these details.
name = models.TextField()
parent_name = models.TextField(null=True, blank=True)
extra_description = models.TextField(null=True, blank=True)
class Meta:
unique_together = ("trash_item_type", "parent_trash_item_id", "trash_item_id")
indexes = [
models.Index(
fields=["-trashed_at", "trash_item_type", "group", "application"]
)
]
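# Illustrative sketch only, not part of the original models: how a TrashEntry row could be
# created when an application is moved to the trash. All field names come from the model
# above; the "application" type string and the helper name are assumptions for illustration.
def _example_trash_application(user, application):
    return TrashEntry.objects.create(
        trash_item_type="application",
        trash_item_id=application.id,
        user_who_trashed=user,
        group=application.group,
        application=application,
        name=application.name,
    )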
| 0.635788 | 0.158956 |
import decimal
from blingalytics import sources
DIVISION_BY_ZERO = (decimal.InvalidOperation, ZeroDivisionError)
class DerivedSource(sources.Source):
def post_process(self, row, clean_inputs):
# Compute derived values for all columns on this row
for name, column in self._columns:
row[name] = column.get_derived_value(row)
return row
class DerivedColumn(sources.Column):
source = DerivedSource
class Value(DerivedColumn):
"""
A column that derives its value from other columns in the row. In
addition to the standard column options, this takes one positional
argument: the function used to derive the value.
The function you provide will be passed one argument: the row, as pulled
from other data sources but before the ``post_process`` step. The row is
a dict with the column names as keys. Your function should return just the
derived value for this column in the row. The function is often provided
as a lambda, but more complex functions can be defined wherever you like.
Continuing the example from above::
derived.Value(lambda row: row['net'] / row['gross'] * Decimal('100.00'))
By default, the footer for this column performs the same operation over
the appropriate footer columns. This is generally the footer you want for
a derived column, as opposed to simply summing or averaging the values in
the column. If one of the columns involved in the derive function does not
return a footer, this will return a total.
"""
def __init__(self, derive_func, **kwargs):
self.derive_func = derive_func
super(Value, self).__init__(**kwargs)
def get_derived_value(self, row):
try:
return self.derive_func(row)
except TypeError:
# Got None for a value, so return None
return None
except DIVISION_BY_ZERO:
return decimal.Decimal('0.00')
def finalize_footer(self, total, footer):
# The footer is the derive function run over the other footer columns
if self.footer:
try:
return self.derive_func(footer)
except TypeError:
# Got None for a value, so return None
return total
except DIVISION_BY_ZERO:
return decimal.Decimal('0.00')
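# Illustrative sketch, not part of the original module: a Value column whose derive function
# computes a percentage from two other columns; the 'net'/'gross' column names are carried
# over from the docstring example above and are assumptions about the report definition.
def _example_margin_column():
    return Value(lambda row: row['net'] / row['gross'] * decimal.Decimal('100.00'))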
class Aggregate(DerivedColumn):
"""
A column that outputs a running total of another column.
Example usage::
derived.Aggregate(lambda row: row['subs'], format=formats.Integer)
This column does not compute or output a footer.
"""
def __init__(self, derive_func, **kwargs):
self.total = 0
self.derive_func = derive_func
super(Aggregate, self).__init__(**kwargs)
# Never return a footer
self.footer = False
def get_derived_value(self, row):
result = self.derive_func(row)
if result:
self.total += result
return self.total
def finalize(self):
self.total = 0
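# Illustrative sketch, not part of the original module: an Aggregate column accumulates a
# running total across rows via get_derived_value, and finalize() resets it between runs.
# The 'subs' column name is taken from the Aggregate docstring and is an assumption here.
def _example_running_total(rows):
    column = Aggregate(lambda row: row['subs'])
    totals = [column.get_derived_value(row) for row in rows]
    column.finalize()
    return totals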
|
blingalytics/sources/derived.py
|
import decimal
from blingalytics import sources
DIVISION_BY_ZERO = (decimal.InvalidOperation, ZeroDivisionError)
class DerivedSource(sources.Source):
def post_process(self, row, clean_inputs):
# Compute derived values for all columns on this row
for name, column in self._columns:
row[name] = column.get_derived_value(row)
return row
class DerivedColumn(sources.Column):
source = DerivedSource
class Value(DerivedColumn):
"""
A column that derives its value from other columns in the row. In
addition to the standard column options, this takes one positional
argument: the function used to derive the value.
The function you provide will be passed one argument: the row, as pulled
from other data sources but before the ``post_process`` step. The row is
a dict with the column names as keys. Your function should return just the
derived value for this column in the row. The function is often provided
as a lambda, but more complex functions can be defined wherever you like.
Continuing the example from above::
derived.Value(lambda row: row['net'] / row['gross'] * Decimal('100.00'))
By default, the footer for this column performs the same operation over
the appropriate footer columns. This is generally the footer you want for
a derived column, as opposed to simply summing or averaging the values in
the column. If one of the columns involved in the derive function does not
return a footer, this will return a total.
"""
def __init__(self, derive_func, **kwargs):
self.derive_func = derive_func
super(Value, self).__init__(**kwargs)
def get_derived_value(self, row):
try:
return self.derive_func(row)
except TypeError:
# Got None for a value, so return None
return None
except DIVISION_BY_ZERO:
return decimal.Decimal('0.00')
def finalize_footer(self, total, footer):
# The footer is the derive function run over the other footer columns
if self.footer:
try:
return self.derive_func(footer)
except TypeError:
# Got None for a value, so return None
return total
except DIVISION_BY_ZERO:
return decimal.Decimal('0.00')
class Aggregate(DerivedColumn):
"""
A column that outputs a running total of another column.
Example usage::
derived.Aggregate(lambda row: row['subs'], format=formats.Integer)
This column does not compute or output a footer.
"""
def __init__(self, derive_func, **kwargs):
self.total = 0
self.derive_func = derive_func
super(Aggregate, self).__init__(**kwargs)
# Never return a footer
self.footer = False
def get_derived_value(self, row):
result = self.derive_func(row)
if result:
self.total += result
return self.total
def finalize(self):
self.total = 0
| 0.75401 | 0.46308 |
# numpy, math and plotting helpers are required by the code below; importing them from
# matplotlib.pyplot is an assumption (the original may have relied on a pylab star-import).
import math
import operator
import numpy as np
from matplotlib.pyplot import specgram, show, plot, imshow
from scikits.audiolab import wavread
from scikits.audiolab import wavwrite
def weighting_vector(size, sigma, mu, fs):
    # Map the center frequency (Hz) onto a spectrogram bin index.
    mu = mu/(fs/2) * size
    weights = []
for t in range(size):
a = 1/(sigma * np.sqrt(2 * math.pi *sigma))
val = a * math.exp( - math.pow(t - mu, 2) / (2 * math.pow(sigma, 2)))
weights.append(val)
return weights
def sibilant_detector(filename):
"""
The aim of this algorithm is to detect where are the parts in filename where the energy is maximal.
This algorithm works as follows:
1- First compute the spectrogram
2- Then compute a gaussian curve centered in the frequency researched. Usually for sibilants it's around 6000 Hz
3- Multiply the spectrum and the gaussian in order to weight the spectrum
4- Mean all the resultant signal and normalize
5- The peaks in the resulting signal are the parts in time where the energy in the researched area is the most important.
"""
sound_data, fs, enc = wavread(filename)
    # Gaussian coefficients
    sigma = 5
    mu = 10000  # center frequency of the weighting curve, in Hz
    NFFT = 512
    # Spectrogram
    Pxx, freqs, bins, im = specgram(sound_data, NFFT=NFFT, noverlap=128, Fs=fs)
    show()
    # Sibilant detector
nb_of_windows = Pxx.shape[1]
nb_of_fft_coefs = Pxx.shape[0]
#Compute the gaussian vector and plot
weights = weighting_vector(nb_of_fft_coefs, sigma, mu, fs)
f_wweights = np.linspace(0, fs/2, len(weights), endpoint=True)
plot(f_wweights, weights)
show()
fft_coeficients = np.zeros(nb_of_fft_coefs)
sibilant_desc = []
weighted_ffts = []
#Multiply the weights and the spectrum and show the multiplication
for i in range(nb_of_windows):
weighted_fft = Pxx[:, i] * weights
if len(weighted_ffts) == 0:
weighted_ffts = weighted_fft
else:
weighted_ffts = np.c_[weighted_ffts, weighted_fft]
sibilant_desc.append(sum(weighted_fft))
imshow(weighted_ffts, interpolation='nearest', aspect='auto')
show()
    # Normalize the per-frame descriptor to [0, 1]
    sibilant_desc = [float(i)/max(sibilant_desc) for i in sibilant_desc]
plot(sibilant_desc)
show()
#export audio
max_index, max_value = max(enumerate(sibilant_desc), key=operator.itemgetter(1))
wavwrite(sound_data[(max_index-5)*NFFT:(max_index+5)*NFFT], 'test.wav', fs=44100)
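# Minimal usage sketch, not part of the original script; "speech.wav" is a placeholder
# filename used only for illustration.
if __name__ == '__main__':
    sibilant_detector('speech.wav')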
|
scripts/sibilant_detector.py
|
from scikits.audiolab import wavread
from scikits.audiolab import wavwrite
import operator
def weighting_vector(size, sigma, mu, fs):
weights = []
mu = mu/(fs/2) * size
weights = []
for t in range(size):
a = 1/(sigma * np.sqrt(2 * math.pi *sigma))
val = a * math.exp( - math.pow(t - mu, 2) / (2 * math.pow(sigma, 2)))
weights.append(val)
return weights
def sibilant_detector(filename):
"""
The aim of this algorithm is to detect where are the parts in filename where the energy is maximal.
This algorithm works as follows:
1- First compute the spectrogram
2- Then compute a gaussian curve centered in the frequency researched. Usually for sibilants it's around 6000 Hz
3- Multiply the spectrum and the gaussian in order to weight the spectrum
4- Mean all the resultant signal and normalize
5- The peaks in the resulting signal are the parts in time where the energy in the researched area is the most important.
"""
sound_data, fs, enc = wavread(filename)
#Gaussian coefs
sigma = 5
mu = 10000 # mean frequency
NFFT=512
#Spectre
Pxx, freqs, bins, im = specgram(sound_data, NFFT=NFFT, noverlap=128 , Fs=fs)
show()
#Siflantes detector
nb_of_windows = Pxx.shape[1]
nb_of_fft_coefs = Pxx.shape[0]
#Compute the gaussian vector and plot
weights = weighting_vector(nb_of_fft_coefs, sigma, mu, fs)
f_wweights = np.linspace(0, fs/2, len(weights), endpoint=True)
plot(f_wweights, weights)
show()
fft_coeficients = np.zeros(nb_of_fft_coefs)
sibilant_desc = []
weighted_ffts = []
#Multiply the weights and the spectrum and show the multiplication
for i in range(nb_of_windows):
weighted_fft = Pxx[:, i] * weights
if len(weighted_ffts) == 0:
weighted_ffts = weighted_fft
else:
weighted_ffts = np.c_[weighted_ffts, weighted_fft]
sibilant_desc.append(sum(weighted_fft))
imshow(weighted_ffts, interpolation='nearest', aspect='auto')
show()
#Now mean the matrix to have only one descriptor
sibilant_desc = [float(i)/max(sibilant_desc) for i in sibilant_desc]
plot(sibilant_desc)
show()
#export audio
max_index, max_value = max(enumerate(sibilant_desc), key=operator.itemgetter(1))
wavwrite(sound_data[(max_index-5)*NFFT:(max_index+5)*NFFT], 'test.wav', fs=44100)
| 0.587352 | 0.484868 |
import numpy as np
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
class StackingEnsemble:
    def __init__(self, layers=None, final=None):
        # `layers` is a list of layers, each of which is a list of estimators;
        # `final` is the single meta-estimator stacked on top of the last layer.
        if layers is None:
            self.layers = [[SVC(), LogisticRegression()], [DecisionTreeClassifier()]]
        else:
            self.layers = layers
        if final is None:
            self.final = GaussianNB()
        else:
            self.final = final
self.network = []
    def network_constructor(self):
        """
        Creates a network containing the layers of estimators, with the final
        meta-estimator appended as a one-estimator layer.
        """
        network = list(self.layers) + [[self.final]]
        self.network = network
        return network
    def forward_pass(self, X, y):
        """
        Do a forward pass of the stacked network: every layer is fitted on the
        column-stacked predictions of the previous layer.
        """
        network = self.network_constructor()
        output = y
        input_current_layer = np.asarray(X)
        for layer in network:
            predictions = []
            for estimator in layer:
                estimator.fit(input_current_layer, output)
                predictions.append(estimator.predict(input_current_layer))
            # Each estimator contributes one feature column for the next layer.
            input_current_layer = np.column_stack(predictions)
        return network
    def fit(self, X, y):
        input_length = len(X)
        target_length = len(y)
        if input_length == target_length:
            return self.forward_pass(X, y)
        else:
            raise ValueError("X and y must have the same length")
    def predict(self, X):
        """
        Do a prediction for test data by propagating it through the fitted layers.
        """
        input_current_layer = np.asarray(X)
        for layer in self.network:
            predictions = []
            for estimator in layer:
                predictions.append(estimator.predict(input_current_layer))
            input_current_layer = np.column_stack(predictions)
        # The last layer holds only the final meta-estimator, so its single
        # prediction column is the output of the ensemble.
        return input_current_layer.ravel()
if __name__ == "__main__":
X_train = [[0, 0], [1, 1]]
y_train = [0, 1]
X_test = [[2.0, 2.0]]
y_test = [1]
    ensemble = StackingEnsemble(layers=[[SVC(), DecisionTreeClassifier()]], final=SVC())
ensemble.fit(X_train, y_train)
y_pred = ensemble.predict(X_test)
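    print("stacked prediction:", y_pred, "expected:", y_test)
    # For reference only (assumption: scikit-learn >= 0.22 is installed), the built-in
    # StackingClassifier implements the same layered idea with cross-validated
    # meta-features. It is only constructed here, not fitted, because the toy dataset
    # above is too small for its default cross-validation.
    from sklearn.ensemble import StackingClassifier
    reference = StackingClassifier(
        estimators=[("svc", SVC()), ("tree", DecisionTreeClassifier())],
        final_estimator=SVC(),
    )
    print(reference)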
|
autogoal/experimental/stacking.py
|
import numpy as np
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
class StackingEnsemble:
def __init__(self, layers=None, final=None):
if layers == None:
self.layers = [[SVC(), LogisticRegression()], [DecisionTreeClassifier()]]
else:
self.layers = layers
if final == None:
self.final = GaussianNB()
else:
self.final = final
self.network = []
def network_constructor(self):
"""
Creates a network containing layers of estimators.
"""
network = self.network
layers = self.layers
final = self.final
network.append(layers)
network.append(final)
return network
def forward_pass(self, X, y):
"""
Do a forward pass of the stacked network
"""
network = self.network_constructor()
output = y
input_current_layer = []
input_next_layer = []
for index, layer in enumerate(network):
if index == 0:
input_current_layer = X
for estimator in layer:
estimator.fit(input_current_layer, output)
input_next_layer.append(estimator.predict(input_current_layer))
else:
input_current_layer = input_next_layer
input_next_layer = []
for estimator in layer:
estimator.fit(input_current_layer, output)
input_next_layer.append(estimator.predict(input_current_layer))
return network
def fit(self, X, y):
input_length = len(X)
target_lenght = len(y)
if input_length == target_lenght:
return self.forward_pass(X, y)
else:
raise ValueError("X and y must have the same length")
def predict(self, X):
"""
Do a prediction for a test data
"""
network = self.network
prediction_current_layer = np.array([])
input_current_layer = []
for index, layer in enumerate(network):
if index == 0:
input_current_layer = X
for estimator in layer:
prediction_current_layer = np.concatenate(
(
prediction_current_layer,
estimator.predict(input_current_layer),
)
)
prediction_current_layer = np.reshape(prediction_current_layer, (1, 2))
else:
input_current_layer = prediction_current_layer
prediction_current_layer = np.array([])
for estimator in layer:
prediction_current_layer = np.concatenate(
(
prediction_current_layer,
estimator.predict(input_current_layer),
)
)
return prediction_current_layer
if __name__ == "__main__":
X_train = [[0, 0], [1, 1]]
y_train = [0, 1]
X_test = [[2.0, 2.0]]
y_test = [1]
ensemble = StackingEnsemble([SVC(), DecisionTreeClassifier()], [SVC()])
ensemble.fit(X_train, y_train)
y_pred = ensemble.predict(X_test)
| 0.829699 | 0.393705 |
from utlis.rank import setrank,isrank,remrank,remsudos,setsudo, GPranks
from utlis.send import Name,Glang
from utlis.tg import Bot
from config import *
from pyrogram import ReplyKeyboardMarkup, InlineKeyboardMarkup, InlineKeyboardButton
import threading, requests, time, random, re,json
import importlib
def delete(client, message,redis):
type = message.chat.type
userID = message.from_user.id
userFN = message.from_user.first_name
chatID = message.chat.id
rank = isrank(redis,userID,chatID)
if message.text :
text = message.text
elif message.caption:
text = message.caption
else:
text = 0
c = importlib.import_module("lang.arcmd")
r = importlib.import_module("lang.arreply")
if redis.sismember("{}Nbot:restricteds".format(BOT_ID),userID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if redis.sismember("{}Nbot:bans".format(BOT_ID),userID):
Bot("kickChatMember",{"chat_id":chatID,"user_id":userID})
    if redis.sismember(f"{BOT_ID}Nbot:{chatID}:muteusers",userID) and (rank is False or rank == 0):
message.delete()
if text :
if text == c.kickme and not redis.sismember("{}Nbot:kickme".format(BOT_ID),chatID):
GetGprank = GPranks(userID,chatID)
if GetGprank == "member":
reply_markup=InlineKeyboardMarkup([[InlineKeyboardButton(r.yes,callback_data=json.dumps(["kickme-yes","",userID])),InlineKeyboardButton(r.no,callback_data=json.dumps(["kickme-no","",userID])),]])
Bot("sendMessage",{"chat_id":chatID,"text":r.kickme,"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
if re.findall("[Hh][Tt][Tt][Pp][Ss]:/|[Hh][Tt][Tt][Pp]://|.[Ii][Rr]|.[Cc][Oo][Mm]|.[Oo][Rr][Gg]|.[Ii][Nn][Ff][Oo]|[Ww][Ww][Ww]|.[Tt][Kk]|.[Mm][Ee]", text):
if redis.sismember("{}Nbot:Llink".format(BOT_ID),chatID): #1
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Llink:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall('@', text):
if redis.sismember("{}Nbot:Lusername".format(BOT_ID),chatID):#2
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lusername:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.forward_date:
if redis.sismember("{}Nbot:Lfwd".format(BOT_ID),chatID):#18
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfwd:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall('#', text):
if redis.sismember("{}Nbot:Ltag".format(BOT_ID),chatID):#3
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Ltag:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall("[a-zA-Z0-9$@$!%*?&#^-_. +]+", text):
if redis.sismember("{}Nbot:Lenglish".format(BOT_ID),chatID):#4
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lenglish:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall("[ا-ي٠-٩]", text):
if redis.sismember("{}Nbot:Larabic".format(BOT_ID),chatID):#5
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Larabic:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
Nlongtext = (redis.get("{}Nbot:Nlongtext".format(BOT_ID)) or 250)
if len(text) >= Nlongtext:
if redis.sismember("{}Nbot:Llongtext".format(BOT_ID),chatID):#2
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Llongtext:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
li = redis.smembers("{}Nbot:{}:blockTEXTs".format(BOT_ID,chatID))
for word in li:
if re.findall(word, text):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
break
# text ^
if message.entities :
if redis.sismember("{}Nbot:Lmarkdown".format(BOT_ID),chatID):#6
for entitie in message.entities:
                if entitie.type == "text_link":
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lmarkdown:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
break
if message.via_bot:
if redis.sismember("{}Nbot:Linline".format(BOT_ID),chatID):#7
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Linline:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.reply_markup:
if redis.sismember("{}Nbot:Linline".format(BOT_ID),chatID):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Linline:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.sticker:
if redis.sismember("{}Nbot:Lsticker".format(BOT_ID),chatID):#8
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lsticker:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockSTICKERs".format(BOT_ID,chatID),message.sticker.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.animation:
if redis.sismember("{}Nbot:Lgifs".format(BOT_ID),chatID):#9
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lgifs:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockanimations".format(BOT_ID,chatID),message.animation.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.audio:
if redis.sismember("{}Nbot:Lmusic".format(BOT_ID),chatID):#10
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lmusic:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.voice:
if redis.sismember("{}Nbot:Lvoice".format(BOT_ID),chatID):#11
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lvoice:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.video:
if redis.sismember("{}Nbot:Lvideo".format(BOT_ID),chatID):#12
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lvideo:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.document:
if redis.sismember("{}Nbot:Lfiles".format(BOT_ID),chatID):#13
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfiles:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.photo:
if redis.sismember("{}Nbot:Lphoto".format(BOT_ID),chatID):#14
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lphoto:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockphotos".format(BOT_ID,chatID),message.photo.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.contact:
if redis.sismember("{}Nbot:Lcontact".format(BOT_ID),chatID):#15
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lcontact:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.new_chat_members:
if message.new_chat_members[0].is_bot:
if redis.sismember("{}Nbot:Lbots".format(BOT_ID),chatID):#16
first_name = message.new_chat_members[0].first_name
username = message.new_chat_members[0].username
Bot("kickChatMember",{"chat_id":chatID,"user_id":message.new_chat_members[0].id})
Bot("sendMessage",{"chat_id":chatID,"text":r.kickbotadd.format(username,first_name),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if redis.sismember("{}Nbot:Ljoin".format(BOT_ID),chatID):#17
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.forward_date:
if redis.sismember("{}Nbot:Lfwd".format(BOT_ID),chatID):#18
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfwd:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.video_note:
if redis.sismember("{}Nbot:Lnote".format(BOT_ID),chatID):#19
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lnote:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if redis.sismember("{}Nbot:Lflood".format(BOT_ID),chatID) :#20
Max_msg = int((redis.hget("{}Nbot:max_msg".format(BOT_ID),chatID) or 10))
Time_ck = int((redis.hget("{}Nbot:time_ck".format(BOT_ID),chatID) or 3))
User_msg = int((redis.get("{}Nbot:{}:{}:flood".format(BOT_ID,chatID,userID)) or 1))
if User_msg > Max_msg:
GetGprank = GPranks(userID,chatID)
if GetGprank == "member":
if redis.hexists("{}Nbot:floodset".format(BOT_ID),chatID):
get = redis.hget("{}Nbot:floodset".format(BOT_ID),chatID)
else:
get = "res"
if get == "res":
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if get == "ban":
Bot("kickChatMember",{"chat_id":chatID,"user_id":userID})
redis.sadd("{}Nbot:{}:restricteds".format(BOT_ID,chatID),userID)
BY = "<a href=\"tg://user?id={}\">{}</a>".format(userID,Name(userFN))
Bot("sendMessage",{"chat_id":chatID,"text":r.TKflood.format(BY,Max_msg,Time_ck),"parse_mode":"html"})
redis.setex("{}Nbot:{}:{}:flood".format(BOT_ID,chatID,userID), Time_ck, User_msg+1)
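# Standalone sketch of the flood-control pattern used above (illustrative; the helper name is
# an assumption, BOT_ID comes from config): a per-user counter stored with SETEX so that it
# expires after `window_seconds`, letting the caller restrict or kick a member once the count
# passes the configured maximum inside that window.
def flood_count(redis, chat_id, user_id, window_seconds):
    key = "{}Nbot:{}:{}:flood".format(BOT_ID, chat_id, user_id)
    count = int(redis.get(key) or 1)
    redis.setex(key, window_seconds, count + 1)
    return count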
|
handlers/delete.py
|
from utlis.rank import setrank,isrank,remrank,remsudos,setsudo, GPranks
from utlis.send import Name,Glang
from utlis.tg import Bot
from config import *
from pyrogram import ReplyKeyboardMarkup, InlineKeyboardMarkup, InlineKeyboardButton
import threading, requests, time, random, re,json
import importlib
def delete(client, message,redis):
type = message.chat.type
userID = message.from_user.id
userFN = message.from_user.first_name
chatID = message.chat.id
rank = isrank(redis,userID,chatID)
if message.text :
text = message.text
elif message.caption:
text = message.caption
else:
text = 0
c = importlib.import_module("lang.arcmd")
r = importlib.import_module("lang.arreply")
if redis.sismember("{}Nbot:restricteds".format(BOT_ID),userID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if redis.sismember("{}Nbot:bans".format(BOT_ID),userID):
Bot("kickChatMember",{"chat_id":chatID,"user_id":userID})
if redis.sismember(f"{BOT_ID}Nbot:{chatID}:muteusers",userID) and (rank is False or rank is 0):
message.delete()
if text :
if text == c.kickme and not redis.sismember("{}Nbot:kickme".format(BOT_ID),chatID):
GetGprank = GPranks(userID,chatID)
if GetGprank == "member":
reply_markup=InlineKeyboardMarkup([[InlineKeyboardButton(r.yes,callback_data=json.dumps(["kickme-yes","",userID])),InlineKeyboardButton(r.no,callback_data=json.dumps(["kickme-no","",userID])),]])
Bot("sendMessage",{"chat_id":chatID,"text":r.kickme,"reply_to_message_id":message.message_id,"parse_mode":"html","reply_markup":reply_markup})
if re.findall("[Hh][Tt][Tt][Pp][Ss]:/|[Hh][Tt][Tt][Pp]://|.[Ii][Rr]|.[Cc][Oo][Mm]|.[Oo][Rr][Gg]|.[Ii][Nn][Ff][Oo]|[Ww][Ww][Ww]|.[Tt][Kk]|.[Mm][Ee]", text):
if redis.sismember("{}Nbot:Llink".format(BOT_ID),chatID): #1
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Llink:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall('@', text):
if redis.sismember("{}Nbot:Lusername".format(BOT_ID),chatID):#2
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lusername:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.forward_date:
if redis.sismember("{}Nbot:Lfwd".format(BOT_ID),chatID):#18
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfwd:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall('#', text):
if redis.sismember("{}Nbot:Ltag".format(BOT_ID),chatID):#3
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Ltag:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall("[a-zA-Z0-9$@$!%*?&#^-_. +]+", text):
if redis.sismember("{}Nbot:Lenglish".format(BOT_ID),chatID):#4
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lenglish:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if re.findall("[ا-ي٠-٩]", text):
if redis.sismember("{}Nbot:Larabic".format(BOT_ID),chatID):#5
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Larabic:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
Nlongtext = (redis.get("{}Nbot:Nlongtext".format(BOT_ID)) or 250)
if len(text) >= Nlongtext:
if redis.sismember("{}Nbot:Llongtext".format(BOT_ID),chatID):#2
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Llongtext:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
li = redis.smembers("{}Nbot:{}:blockTEXTs".format(BOT_ID,chatID))
for word in li:
if re.findall(word, text):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
break
# text ^
if message.entities :
if redis.sismember("{}Nbot:Lmarkdown".format(BOT_ID),chatID):#6
for entitie in message.entities:
if entitie.type is "text_link":
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lmarkdown:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
break
if message.via_bot:
if redis.sismember("{}Nbot:Linline".format(BOT_ID),chatID):#7
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Linline:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.reply_markup:
if redis.sismember("{}Nbot:Linline".format(BOT_ID),chatID):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Linline:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.sticker:
if redis.sismember("{}Nbot:Lsticker".format(BOT_ID),chatID):#8
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lsticker:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockSTICKERs".format(BOT_ID,chatID),message.sticker.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.animation:
if redis.sismember("{}Nbot:Lgifs".format(BOT_ID),chatID):#9
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lgifs:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockanimations".format(BOT_ID,chatID),message.animation.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.audio:
if redis.sismember("{}Nbot:Lmusic".format(BOT_ID),chatID):#10
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lmusic:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.voice:
if redis.sismember("{}Nbot:Lvoice".format(BOT_ID),chatID):#11
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lvoice:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.video:
if redis.sismember("{}Nbot:Lvideo".format(BOT_ID),chatID):#12
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lvideo:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.document:
if redis.sismember("{}Nbot:Lfiles".format(BOT_ID),chatID):#13
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfiles:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.photo:
if redis.sismember("{}Nbot:Lphoto".format(BOT_ID),chatID):#14
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lphoto:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
elif redis.sismember("{}Nbot:{}:blockphotos".format(BOT_ID,chatID),message.photo.file_id):
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.contact:
if redis.sismember("{}Nbot:Lcontact".format(BOT_ID),chatID):#15
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lcontact:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.new_chat_members:
if message.new_chat_members[0].is_bot:
if redis.sismember("{}Nbot:Lbots".format(BOT_ID),chatID):#16
first_name = message.new_chat_members[0].first_name
username = message.new_chat_members[0].username
Bot("kickChatMember",{"chat_id":chatID,"user_id":message.new_chat_members[0].id})
Bot("sendMessage",{"chat_id":chatID,"text":r.kickbotadd.format(username,first_name),"reply_to_message_id":message.message_id,"parse_mode":"html"})
if redis.sismember("{}Nbot:Ljoin".format(BOT_ID),chatID):#17
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if message.forward_date:
if redis.sismember("{}Nbot:Lfwd".format(BOT_ID),chatID):#18
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lfwd:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if message.video_note:
if redis.sismember("{}Nbot:Lnote".format(BOT_ID),chatID):#19
Bot("deleteMessage",{"chat_id":chatID,"message_id":message.message_id})
if redis.sismember("{}Nbot:Lnote:res".format(BOT_ID),chatID):
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if redis.sismember("{}Nbot:Lflood".format(BOT_ID),chatID) :#20
Max_msg = int((redis.hget("{}Nbot:max_msg".format(BOT_ID),chatID) or 10))
Time_ck = int((redis.hget("{}Nbot:time_ck".format(BOT_ID),chatID) or 3))
User_msg = int((redis.get("{}Nbot:{}:{}:flood".format(BOT_ID,chatID,userID)) or 1))
if User_msg > Max_msg:
GetGprank = GPranks(userID,chatID)
if GetGprank == "member":
if redis.hexists("{}Nbot:floodset".format(BOT_ID),chatID):
get = redis.hget("{}Nbot:floodset".format(BOT_ID),chatID)
else:
get = "res"
if get == "res":
Bot("restrictChatMember",{"chat_id": chatID,"user_id": userId,"can_send_messages": 0,"can_send_media_messages": 0,"can_send_other_messages": 0,
"can_send_polls": 0,"can_change_info": 0,"can_add_web_page_previews": 0,"can_pin_messages": 0,"can_invite_users": 0,})
if get == "ban":
Bot("kickChatMember",{"chat_id":chatID,"user_id":userID})
redis.sadd("{}Nbot:{}:restricteds".format(BOT_ID,chatID),userID)
BY = "<a href=\"tg://user?id={}\">{}</a>".format(userID,Name(userFN))
Bot("sendMessage",{"chat_id":chatID,"text":r.TKflood.format(BY,Max_msg,Time_ck),"parse_mode":"html"})
redis.setex("{}Nbot:{}:{}:flood".format(BOT_ID,chatID,userID), Time_ck, User_msg+1)
| 0.15219 | 0.116036 |
import paddle
import paddle.nn as nn
import paddle.fluid.layers as layers
from .builder import NECKS
from paddle.vision.models.resnet import BasicBlock, BottleneckBlock
from ...modules.init import init_backbone_weight, normal_init, kaiming_init, constant_, reset_parameters, xavier_init, init_backbone_weight_simclr
def _init_parameters(module, init_linear='normal', std=0.01, bias=0.):
assert init_linear in ['normal', 'kaiming'], \
"Undefined init_linear: {}".format(init_linear)
for m in module.sublayers():
if isinstance(m, nn.Linear):
if init_linear == 'normal':
normal_init(m, std=std, bias=bias)
else:
kaiming_init(m, mode='fan_in', nonlinearity='relu')
elif isinstance(
m,
(nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm, nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
elif isinstance(m, nn.Conv2D):
kaiming_init(m, mode='fan_in', nonlinearity='relu')
@NECKS.register()
class LinearNeck(nn.Layer):
"""Linear neck: fc only.
"""
def __init__(self, in_channels, out_channels, with_avg_pool=True):
super(LinearNeck, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.fc = nn.Linear(in_channels, out_channels)
# init_backbone_weight(self.fc)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.fc(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV1(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckV1, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels,
hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
# init_backbone_weight(self.mlp)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV2(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckV2, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
# init_backbone_weight(self.mlp)
# self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
# _init_parameters(self, init_linear)
for m in self.sublayers():
if isinstance(m, nn.Linear):
xavier_init(m, distribution='uniform')
elif isinstance(m, (nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm,
nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV3(nn.Layer):
"""MLP"""
def __init__(self, in_channels, hid_channels, out_channels):
super(NonLinearNeckV3, self).__init__()
self.l1 = nn.Linear(in_channels, hid_channels)
self.bn1 = nn.BatchNorm1D(hid_channels)
self.relu1 = nn.ReLU()
self.l2 = nn.Linear(hid_channels, out_channels)
def init_parameters(self, init_linear='kaiming'):
# _init_parameters(self, init_linear)
for m in self.sublayers():
if isinstance(m, nn.Linear):
xavier_init(m, distribution='uniform')
elif isinstance(m, (nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm,
nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
def forward(self, x):
"""forward"""
x = self.l1(x)
x = self.bn1(x)
x = self.relu1(x)
x = self.l2(x)
return x
@NECKS.register()
class ConvNonLinearNeck(nn.Layer):
"""
    The Convolutional Neck proposed by F.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(ConvNonLinearNeck, self).__init__()
self.with_avg_pool = with_avg_pool
assert with_avg_pool, 'The with_avg_pool must be set to True in ConvNonLinearNeck!'
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.conv = BottleneckBlock(in_channels, in_channels // 4)
self.mlp = nn.Sequential(nn.Linear(in_channels,
hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
init_backbone_weight(self.mlp)
def init_parameters(self, init_linear='normal'):
_init_parameters(self, init_linear)
def forward(self, x):
x = self.conv(x)
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckfc3(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckfc3, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels),
nn.BatchNorm1D(out_channels))
init_backbone_weight_simclr(self.mlp)
def init_parameters(self, init_linear='normal'):
_init_parameters(self, init_linear)
def forward(self, x):
x = layers.squeeze(x, axes=[])
hidden = self.mlp(x)
hidden = layers.l2_normalize(hidden, -1)
return hidden
@NECKS.register()
class MLP2d(nn.Layer):
"""The non-linear neck in pixpro.
"""
def __init__(self, in_channels, hid_channels=4096, out_channels=256):
super(MLP2d, self).__init__()
self.linear1 = nn.Conv2D(in_channels,
hid_channels,
kernel_size=1,
stride=1,
padding=0,
bias_attr=True)
self.bn1 = nn.BatchNorm2D(hid_channels)
self.relu1 = nn.ReLU()
self.linear2 = nn.Conv2D(hid_channels,
out_channels,
kernel_size=1,
stride=1,
padding=0,
bias_attr=True)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
return
def forward(self, x):
x = self.linear1(x)
x = self.bn1(x)
x = self.relu1(x)
x = self.linear2(x)
return x
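# Illustrative usage sketch, not part of the original module (assumes paddle is installed and
# this file is imported as part of its package): project ResNet-50 style feature maps with the
# batch-normalized non-linear neck defined above.
def _example_neck_forward():
    neck = NonLinearNeckV2(in_channels=2048, hid_channels=2048, out_channels=128)
    feats = paddle.randn([4, 2048, 7, 7])  # a batch of backbone feature maps
    return neck(feats)  # tensor of shape [4, 128]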
|
passl/modeling/necks/base_neck.py
|
import paddle
import paddle.nn as nn
import paddle.fluid.layers as layers
from .builder import NECKS
from paddle.vision.models.resnet import BasicBlock, BottleneckBlock
from ...modules.init import init_backbone_weight, normal_init, kaiming_init, constant_, reset_parameters, xavier_init, init_backbone_weight_simclr
def _init_parameters(module, init_linear='normal', std=0.01, bias=0.):
assert init_linear in ['normal', 'kaiming'], \
"Undefined init_linear: {}".format(init_linear)
for m in module.sublayers():
if isinstance(m, nn.Linear):
if init_linear == 'normal':
normal_init(m, std=std, bias=bias)
else:
kaiming_init(m, mode='fan_in', nonlinearity='relu')
elif isinstance(
m,
(nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm, nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
elif isinstance(m, nn.Conv2D):
kaiming_init(m, mode='fan_in', nonlinearity='relu')
@NECKS.register()
class LinearNeck(nn.Layer):
"""Linear neck: fc only.
"""
def __init__(self, in_channels, out_channels, with_avg_pool=True):
super(LinearNeck, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.fc = nn.Linear(in_channels, out_channels)
# init_backbone_weight(self.fc)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.fc(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV1(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckV1, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels,
hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
# init_backbone_weight(self.mlp)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV2(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckV2, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
# init_backbone_weight(self.mlp)
# self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
# _init_parameters(self, init_linear)
for m in self.sublayers():
if isinstance(m, nn.Linear):
xavier_init(m, distribution='uniform')
elif isinstance(m, (nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm,
nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
def forward(self, x):
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckV3(nn.Layer):
"""MLP"""
def __init__(self, in_channels, hid_channels, out_channels):
super(NonLinearNeckV3, self).__init__()
self.l1 = nn.Linear(in_channels, hid_channels)
self.bn1 = nn.BatchNorm1D(hid_channels)
self.relu1 = nn.ReLU()
self.l2 = nn.Linear(hid_channels, out_channels)
def init_parameters(self, init_linear='kaiming'):
# _init_parameters(self, init_linear)
for m in self.sublayers():
if isinstance(m, nn.Linear):
xavier_init(m, distribution='uniform')
elif isinstance(m, (nn.BatchNorm1D, nn.BatchNorm2D, nn.GroupNorm,
nn.SyncBatchNorm)):
if m.weight is not None:
constant_(m.weight, 1)
if m.bias is not None:
constant_(m.bias, 0)
def forward(self, x):
"""forward"""
x = self.l1(x)
x = self.bn1(x)
x = self.relu1(x)
x = self.l2(x)
return x
@NECKS.register()
class ConvNonLinearNeck(nn.Layer):
"""
The Convolutioanl Neck proposed by F.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(ConvNonLinearNeck, self).__init__()
self.with_avg_pool = with_avg_pool
assert with_avg_pool, 'The with_avg_pool must be set to True in ConvNonLinearNeck!'
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.conv = BottleneckBlock(in_channels, in_channels // 4)
self.mlp = nn.Sequential(nn.Linear(in_channels,
hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels))
init_backbone_weight(self.mlp)
def init_parameters(self, init_linear='normal'):
_init_parameters(self, init_linear)
def forward(self, x):
x = self.conv(x)
if self.with_avg_pool:
x = self.avgpool(x)
return self.mlp(x.reshape([x.shape[0], -1]))
@NECKS.register()
class NonLinearNeckfc3(nn.Layer):
"""The non-linear neck in MoCo v2: fc-relu-fc-relu-fc.
"""
def __init__(self,
in_channels,
hid_channels,
out_channels,
with_avg_pool=True):
super(NonLinearNeckfc3, self).__init__()
self.with_avg_pool = with_avg_pool
if with_avg_pool:
self.avgpool = nn.AdaptiveAvgPool2D((1, 1))
self.mlp = nn.Sequential(nn.Linear(in_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, hid_channels),
nn.BatchNorm1D(hid_channels), nn.ReLU(),
nn.Linear(hid_channels, out_channels),
nn.BatchNorm1D(out_channels))
init_backbone_weight_simclr(self.mlp)
def init_parameters(self, init_linear='normal'):
_init_parameters(self, init_linear)
def forward(self, x):
x = layers.squeeze(x, axes=[])
hidden = self.mlp(x)
hidden = layers.l2_normalize(hidden, -1)
return hidden
@NECKS.register()
class MLP2d(nn.Layer):
"""The non-linear neck in pixpro.
"""
def __init__(self, in_channels, hid_channels=4096, out_channels=256):
super(MLP2d, self).__init__()
self.linear1 = nn.Conv2D(in_channels,
hid_channels,
kernel_size=1,
stride=1,
padding=0,
bias_attr=True)
self.bn1 = nn.BatchNorm2D(hid_channels)
self.relu1 = nn.ReLU()
self.linear2 = nn.Conv2D(hid_channels,
out_channels,
kernel_size=1,
stride=1,
padding=0,
bias_attr=True)
self.init_parameters()
def init_parameters(self, init_linear='kaiming'):
_init_parameters(self, init_linear)
return
def forward(self, x):
x = self.linear1(x)
x = self.bn1(x)
x = self.relu1(x)
x = self.linear2(x)
return x
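# A minimal shape-check sketch for one of the necks above, assuming PaddlePaddle
# is installed and `nn` above is `paddle.nn` (untrained weights, shapes only).
if __name__ == '__main__':
    import paddle
    neck = NonLinearNeckV3(in_channels=2048, hid_channels=512, out_channels=128)
    feats = paddle.randn([8, 2048])   # a batch of pooled backbone features
    out = neck(feats)                 # expected shape: [8, 128]
    print(out.shape)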
| 0.887881 | 0.321021 |
__author__ = "<NAME> <<EMAIL>>, <NAME> <<EMAIL>>"
import os
import md5
import json
import Queue
import threading
import time
class Worker(object):
    ''' Worker pool for concurrent processing of tasks from a queue using multiple threads.
    This worker is designed to never die, always keeping num_threads threads active.
    It can work on any function with arbitrary arguments using the add_task() method.
Example:
worker = Worker(50)
for i in xrange(100):
worker.add_task(func, arg1, arg2) # blocks when queue is full
worker.join() # blocks here
Args:
        num_threads: the number of worker threads consuming tasks from the Queue.
queue_size: the number of elements that can be placed in Queue. If 0 then infinite.
'''
def __init__(self, num_threads=1, queue_size=0, keep_alive=True, quiet=False):
if queue_size != 0 and queue_size < num_threads:
            raise Exception('queue_size has to be >= num_threads to make sense')
self.num_threads = num_threads
self.queue = Queue.Queue(queue_size)
self.threads = []
self.keep_alive = keep_alive
self.quiet = quiet
self._retain_threads() # Start the threads.
# The following extra thread keeps all the threads alive even if they are crashing.
# This makes it possible to block on a queue size, have threads fail, and still be able to add
# more to the queue because this thread will spawn more new ones to take some stuff off the
# queue.
self.thr = threading.Thread(target=self._keep_alive, args=[self])
self.thr.setDaemon(True)
self.thr.start()
def _retain_threads(self):
        ''' Make sure there are always self.num_threads threads running. '''
while len(self.threads) < self.num_threads:
t = threading.Thread(target=self._run, args=[self])
t.setDaemon(True)
t.start()
self.threads.append(t)
def _keep_alive(self, *args):
        ''' This is called by the self.thr thread to keep all the self.threads alive forever. '''
while self.keep_alive:
# This join(1) here checks if the thread hit an exception and terminated
self.threads = [t.join(1) or t for t in self.threads if t.isAlive()]
if not self.queue.empty() and self.keep_alive:
self._retain_threads()
def _end_func(self):
        ''' Dummy function that, when added to the queue, stops a worker thread. '''
pass
def _run(self, *args):
''' This is the function the threads have as their targets. '''
while True:
(func, args, kargs) = self.queue.get()
if func == self._end_func: # Check for dummy function and if so end thread.
break
func(*args, **kargs)
def restart(self):
''' If the threads have been killed by a KeyboardInterrupt, then you can call this on the worker
to set keep_alive to True and recreate the extra thread which in turn creates worker threads.
'''
self.keep_alive = True
self._retain_threads()
del self.thr
self.thr = threading.Thread(target=self._keep_alive, args=[self])
self.thr.setDaemon(True)
self.thr.start()
def apply_async(self, func, args): # to match multiprocessing.ThreadPool
self.add_task(func, *args)
def add_task(self, func, *args, **kargs):
''' Add a task to the queue, blocking if the queue is full. This also resets the threads to do
work.
'''
if not self.threads:
self.restart()
self.queue.put((func, args, kargs))
def close(self): # to match multiprocessing.ThreadPool
pass
def join(self, block=True, timeout=None):
''' Wait for the queue to empty.
Args:
block: If block is True, this will stall the interpreter at that line until the queue is
empty, recreating threads if they die until the queue is empty. If False, this just recreates
any stalled threads once, and returns so the interpreter can go on. Setting to False does not
            ensure that threads will stay alive, but is handy when you want to keep adding tasks and only
            wait for all of them to be finished at the end of your program.
'''
if timeout is not None:
start_time = time.time()
time_join = timeout
else:
time_join = 100
if block:
try:
# Keep the threads going until the queue is emptied.
                # This is the stop marker for the threads, so put it in the queue now.
for t in range(self.num_threads):
self.add_task(self._end_func)
while self.threads and (timeout is None or time.time() - start_time < timeout):
if self.queue.empty():
raise Exception()
time.sleep(0.0001)
except KeyboardInterrupt:
# self.threads = [t.join(0.01 / self.num_threads) or t for t in self.threads if t.isAlive()]
self.keep_alive = False
for t in range(self.num_threads):
self.add_task(self._end_func)
except Exception:
# Prevent the keep_alive thread from running
self.keep_alive = False
# Stop all the work threads.
for t in range(self.num_threads):
self.add_task(self._end_func)
# Wait on threads.
self.threads = [t.join(time_join) or t for t in self.threads if t.isAlive()]
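# A minimal usage sketch (Python 2, matching the Queue/xrange style above):
# run a simple task over 4 threads and block until the queue is drained.
if __name__ == '__main__':
    results = []
    lock = threading.Lock()
    def square(n):
        with lock:
            results.append(n * n)
    worker = Worker(num_threads=4, queue_size=8)
    for i in xrange(20):
        worker.add_task(square, i)
    worker.join()   # blocks until all tasks have been processed
    print(sorted(results))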
|
web-search-engine/final/utils.py
|
__author__ = "<NAME> <<EMAIL>>, <NAME> <<EMAIL>>"
import os
import md5
import json
import Queue
import threading
import time
class Worker(object):
    ''' Worker pool for concurrent processing of tasks from a queue using multiple threads.
    This worker is designed to never die, always keeping num_threads threads active.
    It can work on any function with arbitrary arguments using the add_task() method.
Example:
worker = Worker(50)
for i in xrange(100):
worker.add_task(func, arg1, arg2) # blocks when queue is full
worker.join() # blocks here
Args:
        num_threads: the number of worker threads consuming tasks from the Queue.
queue_size: the number of elements that can be placed in Queue. If 0 then infinite.
'''
def __init__(self, num_threads=1, queue_size=0, keep_alive=True, quiet=False):
if queue_size != 0 and queue_size < num_threads:
            raise Exception('queue_size has to be >= num_threads to make sense')
self.num_threads = num_threads
self.queue = Queue.Queue(queue_size)
self.threads = []
self.keep_alive = keep_alive
self.quiet = quiet
self._retain_threads() # Start the threads.
# The following extra thread keeps all the threads alive even if they are crashing.
# This makes it possible to block on a queue size, have threads fail, and still be able to add
# more to the queue because this thread will spawn more new ones to take some stuff off the
# queue.
self.thr = threading.Thread(target=self._keep_alive, args=[self])
self.thr.setDaemon(True)
self.thr.start()
def _retain_threads(self):
        ''' Make sure there are always self.num_threads threads running. '''
while len(self.threads) < self.num_threads:
t = threading.Thread(target=self._run, args=[self])
t.setDaemon(True)
t.start()
self.threads.append(t)
def _keep_alive(self, *args):
        ''' This is called by the self.thr thread to keep all the self.threads alive forever. '''
while self.keep_alive:
# This join(1) here checks if the thread hit an exception and terminated
self.threads = [t.join(1) or t for t in self.threads if t.isAlive()]
if not self.queue.empty() and self.keep_alive:
self._retain_threads()
def _end_func(self):
        ''' Dummy function that, when added to the queue, stops a worker thread. '''
pass
def _run(self, *args):
''' This is the function the threads have as their targets. '''
while True:
(func, args, kargs) = self.queue.get()
if func == self._end_func: # Check for dummy function and if so end thread.
break
func(*args, **kargs)
def restart(self):
''' If the threads have been killed by a KeyboardInterrupt, then you can call this on the worker
to set keep_alive to True and recreate the extra thread which in turn creates worker threads.
'''
self.keep_alive = True
self._retain_threads()
del self.thr
self.thr = threading.Thread(target=self._keep_alive, args=[self])
self.thr.setDaemon(True)
self.thr.start()
def apply_async(self, func, args): # to match multiprocessing.ThreadPool
self.add_task(func, *args)
def add_task(self, func, *args, **kargs):
''' Add a task to the queue, blocking if the queue is full. This also resets the threads to do
work.
'''
if not self.threads:
self.restart()
self.queue.put((func, args, kargs))
def close(self): # to match multiprocessing.ThreadPool
pass
def join(self, block=True, timeout=None):
''' Wait for the queue to empty.
Args:
block: If block is True, this will stall the interpreter at that line until the queue is
empty, recreating threads if they die until the queue is empty. If False, this just recreates
any stalled threads once, and returns so the interpreter can go on. Setting to False does not
            ensure that threads will stay alive, but is handy when you want to keep adding tasks and only
            wait for all of them to be finished at the end of your program.
'''
if timeout is not None:
start_time = time.time()
time_join = timeout
else:
time_join = 100
if block:
try:
# Keep the threads going until the queue is emptied.
                # This is the stop marker for the threads, so put it in the queue now.
for t in range(self.num_threads):
self.add_task(self._end_func)
while self.threads and (timeout is None or time.time() - start_time < timeout):
if self.queue.empty():
raise Exception()
time.sleep(0.0001)
except KeyboardInterrupt:
# self.threads = [t.join(0.01 / self.num_threads) or t for t in self.threads if t.isAlive()]
self.keep_alive = False
for t in range(self.num_threads):
self.add_task(self._end_func)
except Exception:
# Prevent the keep_alive thread from running
self.keep_alive = False
# Stop all the work threads.
for t in range(self.num_threads):
self.add_task(self._end_func)
# Wait on threads.
self.threads = [t.join(time_join) or t for t in self.threads if t.isAlive()]
| 0.535827 | 0.194483 |
import typing as tp
import numpy as np
from static_frame.core.util import mloc
from static_frame.core.util import FilePathOrFileLike
from static_frame.core.util import write_optional_file
from static_frame.core.display import DisplayFormats
from static_frame.core.display import DisplayActive
from static_frame.core.display import DisplayConfig
from static_frame.core.doc_str import doc_inject
class IndexBase:
STATIC = True
_IMMUTABLE_CONSTRUCTOR = None
_UFUNC_UNION = None
_UFUNC_INTERSECTION = None
    __slots__ = () # defined in derived classes
#---------------------------------------------------------------------------
# constructors
@classmethod
def from_pandas(cls,
value,
*,
is_go: bool = False) -> 'IndexBase':
'''
Given a Pandas index, return the appropriate IndexBase derived class.
'''
import pandas
from static_frame import Index
from static_frame import IndexGO
from static_frame import IndexDate
from static_frame import IndexHierarchy
from static_frame import IndexHierarchyGO
if isinstance(value, pandas.MultiIndex):
            # iterating over a hierarchical index will iterate over labels
if is_go:
return IndexHierarchyGO.from_labels(value)
return IndexHierarchy.from_labels(value)
elif isinstance(value, pandas.DatetimeIndex):
if is_go:
raise NotImplementedError('No grow-only version of IndexDate yet exists')
return IndexDate(value)
if is_go:
return IndexGO(value)
return Index(value)
#---------------------------------------------------------------------------
# name interface
@property
def name(self) -> tp.Hashable:
return self._name
#---------------------------------------------------------------------------
# common attributes from the numpy array
@property
def mloc(self):
'''Memory location
'''
if self._recache:
self._update_array_cache()
return mloc(self._labels)
@property
def dtype(self) -> np.dtype:
'''
Return the dtype of the underlying NumPy array.
Returns:
:py:class:`numpy.dtype`
'''
if self._recache:
self._update_array_cache()
return self._labels.dtype
@property
def shape(self) -> tp.Tuple[int]:
'''
Return a tuple describing the shape of the underlying NumPy array.
Returns:
:py:class:`tp.Tuple[int]`
'''
if self._recache:
self._update_array_cache()
return self.values.shape
@property
def ndim(self) -> int:
'''
Return the number of dimensions.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.ndim
@property
def size(self) -> int:
'''
Return the size of the underlying NumPy array.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.size
@property
def nbytes(self) -> int:
'''
Return the total bytes of the underlying NumPy array.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.nbytes
#---------------------------------------------------------------------------
# set operations
def intersection(self, other) -> 'Index':
if self._recache:
self._update_array_cache()
if isinstance(other, np.ndarray):
opperand = other
else: # assume we can get it from a .values attribute
opperand = other.values
cls = self.__class__
return cls.from_labels(cls._UFUNC_INTERSECTION(self._labels, opperand))
def union(self, other) -> 'Index':
if self._recache:
self._update_array_cache()
if isinstance(other, np.ndarray):
opperand = other
else: # assume we can get it from a .values attribute
opperand = other.values
cls = self.__class__
return cls.from_labels(cls._UFUNC_UNION(self._labels, opperand))
#---------------------------------------------------------------------------
# common display
def __repr__(self) -> str:
return repr(self.display())
def _repr_html_(self):
'''
Provide HTML representation for Jupyter Notebooks.
'''
# modify the active display to be force HTML
config = DisplayActive.get(
display_format=DisplayFormats.HTML_TABLE,
type_show=False
)
return repr(self.display(config))
#---------------------------------------------------------------------------
# exporters
@doc_inject(class_name='Index')
def to_html(self,
config: tp.Optional[DisplayConfig] = None
):
'''
{}
'''
config = config or DisplayActive.get(type_show=False)
config = config.to_display_config(
display_format=DisplayFormats.HTML_TABLE,
)
return repr(self.display(config))
@doc_inject(class_name='Index')
def to_html_datatables(self,
fp: tp.Optional[FilePathOrFileLike] = None,
*,
show: bool = True,
config: tp.Optional[DisplayConfig] = None
) -> str:
'''
{}
'''
config = config or DisplayActive.get(type_show=False)
config = config.to_display_config(
display_format=DisplayFormats.HTML_DATATABLES,
)
content = repr(self.display(config))
fp = write_optional_file(content=content, fp=fp)
if fp and show:
import webbrowser
webbrowser.open_new_tab(fp)
return fp
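# A minimal sketch of the from_pandas() constructor, assuming pandas and
# static-frame are installed (hypothetical labels, shown for illustration only).
if __name__ == '__main__':
    import pandas as pd
    flat = IndexBase.from_pandas(pd.Index(['a', 'b', 'c']))
    hier = IndexBase.from_pandas(pd.MultiIndex.from_product([('x', 'y'), (1, 2)]))
    print(type(flat).__name__, type(hier).__name__)   # Index IndexHierarchy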
|
static_frame/core/index_base.py
|
import typing as tp
import numpy as np
from static_frame.core.util import mloc
from static_frame.core.util import FilePathOrFileLike
from static_frame.core.util import write_optional_file
from static_frame.core.display import DisplayFormats
from static_frame.core.display import DisplayActive
from static_frame.core.display import DisplayConfig
from static_frame.core.doc_str import doc_inject
class IndexBase:
STATIC = True
_IMMUTABLE_CONSTRUCTOR = None
_UFUNC_UNION = None
_UFUNC_INTERSECTION = None
    __slots__ = () # defined in derived classes
#---------------------------------------------------------------------------
# constructors
@classmethod
def from_pandas(cls,
value,
*,
is_go: bool = False) -> 'IndexBase':
'''
Given a Pandas index, return the appropriate IndexBase derived class.
'''
import pandas
from static_frame import Index
from static_frame import IndexGO
from static_frame import IndexDate
from static_frame import IndexHierarchy
from static_frame import IndexHierarchyGO
if isinstance(value, pandas.MultiIndex):
            # iterating over a hierarchical index will iterate over labels
if is_go:
return IndexHierarchyGO.from_labels(value)
return IndexHierarchy.from_labels(value)
elif isinstance(value, pandas.DatetimeIndex):
if is_go:
raise NotImplementedError('No grow-only version of IndexDate yet exists')
return IndexDate(value)
if is_go:
return IndexGO(value)
return Index(value)
#---------------------------------------------------------------------------
# name interface
@property
def name(self) -> tp.Hashable:
return self._name
#---------------------------------------------------------------------------
# common attributes from the numpy array
@property
def mloc(self):
'''Memory location
'''
if self._recache:
self._update_array_cache()
return mloc(self._labels)
@property
def dtype(self) -> np.dtype:
'''
Return the dtype of the underlying NumPy array.
Returns:
:py:class:`numpy.dtype`
'''
if self._recache:
self._update_array_cache()
return self._labels.dtype
@property
def shape(self) -> tp.Tuple[int]:
'''
Return a tuple describing the shape of the underlying NumPy array.
Returns:
:py:class:`tp.Tuple[int]`
'''
if self._recache:
self._update_array_cache()
return self.values.shape
@property
def ndim(self) -> int:
'''
Return the number of dimensions.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.ndim
@property
def size(self) -> int:
'''
Return the size of the underlying NumPy array.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.size
@property
def nbytes(self) -> int:
'''
Return the total bytes of the underlying NumPy array.
Returns:
:py:class:`int`
'''
if self._recache:
self._update_array_cache()
return self._labels.nbytes
#---------------------------------------------------------------------------
# set operations
def intersection(self, other) -> 'Index':
if self._recache:
self._update_array_cache()
if isinstance(other, np.ndarray):
opperand = other
else: # assume we can get it from a .values attribute
opperand = other.values
cls = self.__class__
return cls.from_labels(cls._UFUNC_INTERSECTION(self._labels, opperand))
def union(self, other) -> 'Index':
if self._recache:
self._update_array_cache()
if isinstance(other, np.ndarray):
opperand = other
else: # assume we can get it from a .values attribute
opperand = other.values
cls = self.__class__
return cls.from_labels(cls._UFUNC_UNION(self._labels, opperand))
#---------------------------------------------------------------------------
# common display
def __repr__(self) -> str:
return repr(self.display())
def _repr_html_(self):
'''
Provide HTML representation for Jupyter Notebooks.
'''
# modify the active display to be force HTML
config = DisplayActive.get(
display_format=DisplayFormats.HTML_TABLE,
type_show=False
)
return repr(self.display(config))
#---------------------------------------------------------------------------
# exporters
@doc_inject(class_name='Index')
def to_html(self,
config: tp.Optional[DisplayConfig] = None
):
'''
{}
'''
config = config or DisplayActive.get(type_show=False)
config = config.to_display_config(
display_format=DisplayFormats.HTML_TABLE,
)
return repr(self.display(config))
@doc_inject(class_name='Index')
def to_html_datatables(self,
fp: tp.Optional[FilePathOrFileLike] = None,
*,
show: bool = True,
config: tp.Optional[DisplayConfig] = None
) -> str:
'''
{}
'''
config = config or DisplayActive.get(type_show=False)
config = config.to_display_config(
display_format=DisplayFormats.HTML_DATATABLES,
)
content = repr(self.display(config))
fp = write_optional_file(content=content, fp=fp)
if fp and show:
import webbrowser
webbrowser.open_new_tab(fp)
return fp
| 0.613005 | 0.315024 |
import struct
def recv_all(sock, size):
received = ""
while len(received) < size:
data = sock.recv(size - len(received))
if data == "":
raise Exception("Lost connection")
else:
received += data
return received
class basePacker(object):
@classmethod
def pack(cls, value):
return struct.pack(cls._format, value)
@classmethod
def unpack(cls, buf, offset = 0):
size = struct.calcsize(cls._format)
value, = struct.unpack(cls._format, buf[offset:offset + size])
return value, offset + size
@classmethod
def recv(cls, sock):
size = struct.calcsize(cls._format)
data = recv_all(sock, size)
value, = struct.unpack(cls._format, data)
return value
class int8Packer(basePacker):
_format = '>b'
class int16Packer(basePacker):
_format = '>h'
class int32Packer(basePacker):
_format = '>i'
class int64Packer(basePacker):
_format = '>q'
class uint8Packer(basePacker):
_format = '>B'
class uint16Packer(basePacker):
_format = '>H'
class uint32Packer(basePacker):
_format = '>I'
class uint64Packer(basePacker):
_format = '>Q'
class float32Packer(basePacker):
_format = '>f'
class float64Packer(basePacker):
_format = '>d'
class astringPacker(object):
@staticmethod
def pack(value):
asc = value.encode('ascii')
return struct.pack(">I", len(asc)) + asc
@staticmethod
def unpack(buf, offset = 0):
length, offset = uint32Packer.unpack(buf, offset)
asc = buf[offset:offset + length]
return asc.decode('ascii'), offset + length
@staticmethod
def recv(sock):
length = uint32Packer.recv(sock)
data = recv_all(sock, length)
return str(data)
class ustringPacker(object):
@staticmethod
def pack(value):
utf8 = value.encode('utf-8')
return struct.pack(">I", len(utf8)) + utf8
@staticmethod
def unpack(buf, offset = 0):
length, offset = uint32Packer.unpack(buf, offset)
utf8 = buf[offset:offset + length]
return utf8.decode('utf-8'), offset + length
@staticmethod
def recv(sock):
length = uint32Packer.recv(sock)
data = recv_all(sock, length)
return data.decode('utf-8')
if __name__ == '__main__':
s = "abc"
print("s = %s" % s)
buf = astringPacker.pack(s)
print("packed:\n", repr(buf))
s, offset = astringPacker.unpack(buf)
print("unpacked: s =", s, ", offset =", offset)
s = U"αߢ"
print("s = %s" % s)
buf = ustringPacker.pack(s)
print("packed:\n", repr(buf))
s, offset = ustringPacker.unpack(buf)
print("unpacked: s =", s, ", offset =", offset)
|
tyger.py
|
import struct
def recv_all(sock, size):
received = ""
while len(received) < size:
data = sock.recv(size - len(received))
if data == "":
raise Exception("Lost connection")
else:
received += data
return received
class basePacker(object):
@classmethod
def pack(cls, value):
return struct.pack(cls._format, value)
@classmethod
def unpack(cls, buf, offset = 0):
size = struct.calcsize(cls._format)
value, = struct.unpack(cls._format, buf[offset:offset + size])
return value, offset + size
@classmethod
def recv(cls, sock):
size = struct.calcsize(cls._format)
data = recv_all(sock, size)
value, = struct.unpack(cls._format, data)
return value
class int8Packer(basePacker):
_format = '>b'
class int16Packer(basePacker):
_format = '>h'
class int32Packer(basePacker):
_format = '>i'
class int64Packer(basePacker):
_format = '>q'
class uint8Packer(basePacker):
_format = '>B'
class uint16Packer(basePacker):
_format = '>H'
class uint32Packer(basePacker):
_format = '>I'
class uint64Packer(basePacker):
_format = '>Q'
class float32Packer(basePacker):
_format = '>f'
class float64Packer(basePacker):
_format = '>d'
class astringPacker(object):
@staticmethod
def pack(value):
asc = value.encode('ascii')
return struct.pack(">I", len(asc)) + asc
@staticmethod
def unpack(buf, offset = 0):
length, offset = uint32Packer.unpack(buf, offset)
asc = buf[offset:offset + length]
return asc.decode('ascii'), offset + length
@staticmethod
def recv(sock):
length = uint32Packer.recv(sock)
data = recv_all(sock, length)
return str(data)
class ustringPacker(object):
@staticmethod
def pack(value):
utf8 = value.encode('utf-8')
return struct.pack(">I", len(utf8)) + utf8
@staticmethod
def unpack(buf, offset = 0):
length, offset = uint32Packer.unpack(buf, offset)
utf8 = buf[offset:offset + length]
return utf8.decode('utf-8'), offset + length
@staticmethod
def recv(sock):
length = uint32Packer.recv(sock)
data = recv_all(sock, length)
return data.decode('utf-8')
if __name__ == '__main__':
s = "abc"
print("s = %s" % s)
buf = astringPacker.pack(s)
print("packed:\n", repr(buf))
s, offset = astringPacker.unpack(buf)
print("unpacked: s =", s, ", offset =", offset)
s = U"αߢ"
print("s = %s" % s)
buf = ustringPacker.pack(s)
print("packed:\n", repr(buf))
s, offset = ustringPacker.unpack(buf)
print("unpacked: s =", s, ", offset =", offset)
| 0.451568 | 0.225961 |
import torch.nn as nn
import torch.nn.utils.spectral_norm as SN
import torchvision
import torch
def conv_block(in_channels, out_channels, kernel_size, stride, padding=1, bias=True, activation=nn.ReLU(), transpose=False, no_BN=False, all_tanh=False, spec_norm=False):
if(transpose):
block = [nn.ConvTranspose2d(in_channels=in_channels, out_channels=out_channels,
stride=stride, kernel_size=kernel_size, padding=padding, bias=bias) ]
else:
block = [nn.Conv2d(in_channels=in_channels, out_channels=out_channels,
stride=stride, kernel_size=kernel_size, padding=padding, bias=bias) ]
if(spec_norm):
block[0] = SN(block[0])
elif(not no_BN):
block.append(nn.BatchNorm2d(num_features=out_channels))
if(all_tanh):
block.append(nn.Tanh())
elif(activation != None):
block.append(activation)
return block
class StandardCNN_Generator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False):
super(StandardCNN_Generator, self).__init__()
self.linear = nn.Sequential(nn.Flatten(), nn.Linear(128, 512*4*4))
self.model = nn.Sequential( *conv_block(in_channels=512, out_channels=256, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=256, out_channels=128, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=128, out_channels=64, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=64, out_channels=3, stride=1, kernel_size=3,
transpose=True, no_BN=True, all_tanh=True) )
def forward(self, z):
linear = self.linear(z)
reshaped = linear.view(-1,512,4,4)
return self.model(reshaped)
class StandardCNN_Discriminator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False, spec_norm=True):
super(StandardCNN_Discriminator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=3, out_channels=64, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=64, out_channels=64, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=64, out_channels=128, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=128, out_channels=128, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=128, out_channels=256, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=256, out_channels=256, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=256, out_channels=512, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation=nn.LeakyReLU(negative_slope=1e-1)),
nn.Flatten(),
( SN(nn.Linear(512*4*4, 1)) if spec_norm
else nn.Linear(512*4*4, 1)) )
def forward(self, x):
return self.model(x)
class DCGAN_64_Generator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False):
super(DCGAN_64_Generator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=128, out_channels=512, stride=1, bias=False, padding=0,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=512, out_channels=256, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=256, out_channels=128, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=128, out_channels=64, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=64, out_channels=3, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=True, all_tanh=True) )
def forward(self, z):
return self.model(z)
class DCGAN_64_Discriminator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False, spec_norm=False):
super(DCGAN_64_Discriminator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=3, out_channels=64, stride=2, no_BN=True,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=64, out_channels=128, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=128, out_channels=256, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=256, out_channels=512, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=512, out_channels=1, stride=2, no_BN=True,
kernel_size=4, bias=False, all_tanh=False, spec_norm=spec_norm,
activation=None) )
def forward(self, x):
return self.model(x)
class InceptionV3(nn.Module):
def __init__(self, verbose = False):
super(InceptionV3, self).__init__()
if verbose:
print("Loading the pretrained InceptionV3 model...")
inception = torchvision.models.inception_v3(pretrained = True)
if verbose:
print("Model succesfully loaded!")
# Removed the last average pooling layer, so this network outputs the input image features instead of some scalar.
self.layers = [
inception.Conv2d_1a_3x3,
inception.Conv2d_2a_3x3,
inception.Conv2d_2b_3x3,
nn.MaxPool2d(kernel_size = 3, stride = 2),
inception.Conv2d_3b_1x1,
inception.Conv2d_4a_3x3,
nn.MaxPool2d(kernel_size = 3, stride = 2),
inception.Mixed_5b,
inception.Mixed_5c,
inception.Mixed_5d,
inception.Mixed_6a,
inception.Mixed_6b,
inception.Mixed_6c,
inception.Mixed_6d,
inception.Mixed_6e,
inception.Mixed_7a,
inception.Mixed_7b,
inception.Mixed_7c
]
self.model = nn.Sequential(*self.layers)
# This model will not be trained for the purposes of this project.
for parameter in self.parameters():
parameter.requires_grad = False
def forward(self, x):
x = torch.nn.functional.interpolate(x,
size = (299, 299),
mode = 'bilinear',
align_corners = False)
# Move input from range [0, 1] to [-1, 1]
x = 2 * x - 1
# Run model through the network (last layer removed)
x = self.model(x)
return x
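# A minimal smoke-test sketch for the DCGAN pair above, assuming torch is
# installed (untrained weights, shape check only).
if __name__ == "__main__":
    g = DCGAN_64_Generator()
    d = DCGAN_64_Discriminator()
    z = torch.randn(4, 128, 1, 1)   # latent vectors as 1x1 feature maps
    fake = g(z)                     # expected shape: (4, 3, 64, 64)
    print(fake.shape, d(fake).shape)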
|
models.py
|
import torch.nn as nn
import torch.nn.utils.spectral_norm as SN
import torchvision
import torch
def conv_block(in_channels, out_channels, kernel_size, stride, padding=1, bias=True, activation=nn.ReLU(), transpose=False, no_BN=False, all_tanh=False, spec_norm=False):
if(transpose):
block = [nn.ConvTranspose2d(in_channels=in_channels, out_channels=out_channels,
stride=stride, kernel_size=kernel_size, padding=padding, bias=bias) ]
else:
block = [nn.Conv2d(in_channels=in_channels, out_channels=out_channels,
stride=stride, kernel_size=kernel_size, padding=padding, bias=bias) ]
if(spec_norm):
block[0] = SN(block[0])
elif(not no_BN):
block.append(nn.BatchNorm2d(num_features=out_channels))
if(all_tanh):
block.append(nn.Tanh())
elif(activation != None):
block.append(activation)
return block
class StandardCNN_Generator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False):
super(StandardCNN_Generator, self).__init__()
self.linear = nn.Sequential(nn.Flatten(), nn.Linear(128, 512*4*4))
self.model = nn.Sequential( *conv_block(in_channels=512, out_channels=256, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=256, out_channels=128, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=128, out_channels=64, stride=2, kernel_size=4,
transpose=True, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=64, out_channels=3, stride=1, kernel_size=3,
transpose=True, no_BN=True, all_tanh=True) )
def forward(self, z):
linear = self.linear(z)
reshaped = linear.view(-1,512,4,4)
return self.model(reshaped)
class StandardCNN_Discriminator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False, spec_norm=True):
super(StandardCNN_Discriminator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=3, out_channels=64, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=64, out_channels=64, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=64, out_channels=128, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=128, out_channels=128, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=128, out_channels=256, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=256, out_channels=256, stride=2, no_BN=no_BN,
kernel_size=4, spec_norm=spec_norm, all_tanh=all_tanh,
activation= nn.LeakyReLU(negative_slope=1e-1)),
*conv_block(in_channels=256, out_channels=512, stride=1, no_BN=no_BN,
kernel_size=3, spec_norm=spec_norm, all_tanh=all_tanh,
activation=nn.LeakyReLU(negative_slope=1e-1)),
nn.Flatten(),
( SN(nn.Linear(512*4*4, 1)) if spec_norm
else nn.Linear(512*4*4, 1)) )
def forward(self, x):
return self.model(x)
class DCGAN_64_Generator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False):
super(DCGAN_64_Generator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=128, out_channels=512, stride=1, bias=False, padding=0,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=512, out_channels=256, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=256, out_channels=128, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=128, out_channels=64, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=no_BN, all_tanh=all_tanh),
*conv_block(in_channels=64, out_channels=3, stride=2, bias=False,
transpose=True, kernel_size=4, no_BN=True, all_tanh=True) )
def forward(self, z):
return self.model(z)
class DCGAN_64_Discriminator(nn.Module):
def __init__(self, no_BN=False, all_tanh=False, spec_norm=False):
super(DCGAN_64_Discriminator, self).__init__()
self.model = nn.Sequential( *conv_block(in_channels=3, out_channels=64, stride=2, no_BN=True,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=64, out_channels=128, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=128, out_channels=256, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=256, out_channels=512, stride=2, no_BN=no_BN,
kernel_size=4, bias=False, all_tanh=all_tanh, spec_norm=spec_norm,
activation= nn.LeakyReLU(negative_slope=2e-1)),
*conv_block(in_channels=512, out_channels=1, stride=2, no_BN=True,
kernel_size=4, bias=False, all_tanh=False, spec_norm=spec_norm,
activation=None) )
def forward(self, x):
return self.model(x)
class InceptionV3(nn.Module):
def __init__(self, verbose = False):
super(InceptionV3, self).__init__()
if verbose:
print("Loading the pretrained InceptionV3 model...")
inception = torchvision.models.inception_v3(pretrained = True)
if verbose:
print("Model succesfully loaded!")
# Removed the last average pooling layer, so this network outputs the input image features instead of some scalar.
self.layers = [
inception.Conv2d_1a_3x3,
inception.Conv2d_2a_3x3,
inception.Conv2d_2b_3x3,
nn.MaxPool2d(kernel_size = 3, stride = 2),
inception.Conv2d_3b_1x1,
inception.Conv2d_4a_3x3,
nn.MaxPool2d(kernel_size = 3, stride = 2),
inception.Mixed_5b,
inception.Mixed_5c,
inception.Mixed_5d,
inception.Mixed_6a,
inception.Mixed_6b,
inception.Mixed_6c,
inception.Mixed_6d,
inception.Mixed_6e,
inception.Mixed_7a,
inception.Mixed_7b,
inception.Mixed_7c
]
self.model = nn.Sequential(*self.layers)
# This model will not be trained for the purposes of this project.
for parameter in self.parameters():
parameter.requires_grad = False
def forward(self, x):
x = torch.nn.functional.interpolate(x,
size = (299, 299),
mode = 'bilinear',
align_corners = False)
# Move input from range [0, 1] to [-1, 1]
x = 2 * x - 1
# Run model through the network (last layer removed)
x = self.model(x)
return x
| 0.922731 | 0.345906 |
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 9
_modified_time = 1396763868.373039
_enable_loop = True
_template_filename = 'C:\\Users\\<NAME>\\Desktop\\MyStuff\\account\\scripts/user.jsm'
_template_uri = 'user.jsm'
_source_encoding = 'ascii'
import os, os.path, re
_exports = []
def render_body(context,**pageargs):
__M_caller = context.caller_stack._push_frame()
try:
__M_locals = __M_dict_builtin(pageargs=pageargs)
user = context.get('user', UNDEFINED)
__M_writer = context.writer()
# SOURCE LINE 1
__M_writer("\n\n//Ajax call to create a modal\n$(function() {\n\n\t$('#password_button').off('click.password').on('click.password', function(){\n\n\t\t$('#password_button').loadmodal({\n\t\t\turl: '/account/user__password/")
# SOURCE LINE 9
__M_writer(str(user.id))
__M_writer("',\n\t\t\tid: 'password_modal',\n\t\t\ttitle: '<h2>Edit Password</h2>',\n\t\t\twidth: '600px',\n\t\t\tajax: {\n\t\t\t\tdataType: 'html',\n\t\t\t\tmethod: 'POST',\n\t\t\t\tsuccess: function(data, status, xhr) {\n\t\t\t\t\tconsole.log($('#password_modal'));\n\t\t\t\t},//\n\t\t\t// any other options from the regular $.ajax call (see JQuery docs)\n\t\t\t\n\t\t\t},\n\t\t});\n\t});\n});\n\n//Ajax call to create a modal\n$(function() {\n\n\t$('#edit_button').off('click.edit').on('click.edit', function(){\n\n\t\t$('#edit_button').loadmodal({\n\t\t\turl: '/account/user__edit/")
# SOURCE LINE 32
__M_writer(str(user.id))
__M_writer("',\n\t\t\tid: 'edit_modal',\n\t\t\ttitle: '<h2>Edit Account Info</h2>',\n\t\t\twidth: '600px',\n\t\t\tajax: {\n\t\t\t\tdataType: 'html',\n\t\t\t\tmethod: 'POST',\n\t\t\t\tsuccess: function(data, status, xhr) {\n\t\t\t\t\tconsole.log($('#edit_modal'));\n\t\t\t\t},//\n\t\t\t// any other options from the regular $.ajax call (see JQuery docs)\n\t\t\t\n\t\t\t},\n\t\t});\n\t});\n});")
return ''
finally:
context.caller_stack._pop_frame()
|
account/cached_templates/scripts/user.jsm.py
|
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 9
_modified_time = 1396763868.373039
_enable_loop = True
_template_filename = 'C:\\Users\\<NAME>\\Desktop\\MyStuff\\account\\scripts/user.jsm'
_template_uri = 'user.jsm'
_source_encoding = 'ascii'
import os, os.path, re
_exports = []
def render_body(context,**pageargs):
__M_caller = context.caller_stack._push_frame()
try:
__M_locals = __M_dict_builtin(pageargs=pageargs)
user = context.get('user', UNDEFINED)
__M_writer = context.writer()
# SOURCE LINE 1
__M_writer("\n\n//Ajax call to create a modal\n$(function() {\n\n\t$('#password_button').off('click.password').on('click.password', function(){\n\n\t\t$('#password_button').loadmodal({\n\t\t\turl: '/account/user__password/")
# SOURCE LINE 9
__M_writer(str(user.id))
__M_writer("',\n\t\t\tid: 'password_modal',\n\t\t\ttitle: '<h2>Edit Password</h2>',\n\t\t\twidth: '600px',\n\t\t\tajax: {\n\t\t\t\tdataType: 'html',\n\t\t\t\tmethod: 'POST',\n\t\t\t\tsuccess: function(data, status, xhr) {\n\t\t\t\t\tconsole.log($('#password_modal'));\n\t\t\t\t},//\n\t\t\t// any other options from the regular $.ajax call (see JQuery docs)\n\t\t\t\n\t\t\t},\n\t\t});\n\t});\n});\n\n//Ajax call to create a modal\n$(function() {\n\n\t$('#edit_button').off('click.edit').on('click.edit', function(){\n\n\t\t$('#edit_button').loadmodal({\n\t\t\turl: '/account/user__edit/")
# SOURCE LINE 32
__M_writer(str(user.id))
__M_writer("',\n\t\t\tid: 'edit_modal',\n\t\t\ttitle: '<h2>Edit Account Info</h2>',\n\t\t\twidth: '600px',\n\t\t\tajax: {\n\t\t\t\tdataType: 'html',\n\t\t\t\tmethod: 'POST',\n\t\t\t\tsuccess: function(data, status, xhr) {\n\t\t\t\t\tconsole.log($('#edit_modal'));\n\t\t\t\t},//\n\t\t\t// any other options from the regular $.ajax call (see JQuery docs)\n\t\t\t\n\t\t\t},\n\t\t});\n\t});\n});")
return ''
finally:
context.caller_stack._pop_frame()
| 0.218253 | 0.121009 |
class Item:
def __init__(self, itemID, modid='minecraft'):
super().__init__()
self.modid = modid
self.id = itemID
def __str__(self) -> str:
return f"{self.modid}:{self.id}"
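# Usage: str(Item('stone')) -> 'minecraft:stone'; with a custom modid, e.g. the
# hypothetical Item('copper_coil', 'examplemod') -> 'examplemod:copper_coil'.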
WHITE_CANDLE = Item('white_candle')
ORANGE_CANDLE = Item('orange_candle')
MAGENTA_CANDLE = Item('magenta_candle')
LIGHT_BLUE_CANDLE = Item('light_blue_candle')
YELLOW_CANDLE = Item('yellow_candle')
LIME_CANDLE = Item('lime_candle')
PINK_CANDLE = Item('pink_candle')
GRAY_CANDLE = Item('gray_candle')
LIGHT_GRAY_CANDLE = Item('light_gray_candle')
CYAN_CANDLE = Item('cyan_candle')
PURPLE_CANDLE = Item('purple_candle')
BLUE_CANDLE = Item('blue_candle')
BROWN_CANDLE = Item('brown_candle')
GREEN_CANDLE = Item('green_candle')
RED_CANDLE = Item('red_candle')
BLACK_CANDLE = Item('black_candle')
CANDLE = Item('candle')
DEEPSLATE_COAL_ORE = Item('deepslate_coal_ore')
COPPER_ORE = Item('copper_ore')
DEEPSLATE_COPPER_ORE = Item('deepslate_copper_ore')
DEEPSLATE_DIAMOND_ORE = Item('DEEPSLATE_DIAMOND_ORE'.lower())
DEEPSLATE_EMERALD_ORE = Item('DEEPSLATE_EMERALD_ORE'.lower())
FLOWERING_AZALEA_LEAVES = Item('FLOWERING_AZALEA_LEAVES'.lower())
FLOWERING_AZALEA = Item('FLOWERING_AZALEA'.lower())
GLOW_BERRIES = Item('GLOW_BERRIES'.lower())
DEEPSLATE_GOLD_ORE = Item('DEEPSLATE_GOLD_ORE'.lower())
DEEPSLATE_IRON_ORE = Item('DEEPSLATE_IRON_ORE'.lower())
DEEPSLATE_LAPIS_ORE = Item('DEEPSLATE_LAPIS_ORE'.lower())
DEEPSLATE_REDSTONE_ORE = Item('DEEPSLATE_REDSTONE_ORE'.lower())
COBBLED_DEEPSLATE = Item('COBBLED_DEEPSLATE'.lower())
COBBLED_DEEPSLATE_WALL = Item('COBBLED_DEEPSLATE_WALL'.lower())
POLISHED_DEEPSLATE_WALL = Item('POLISHED_DEEPSLATE_WALL'.lower())
POLISHED_DEEPSLATE_STAIRS = Item('polished_deepslate_stairs')
DEEPSLATE_TILE_STAIRS = Item('deepslate_tile_stairs')
DEEPSLATE_BRICK_STAIRS = Item('deepslate_brick_stairs')
OXIDIZED_CUT_COPPER_STAIRS = Item('oxidized_cut_copper_stairs')
WEATHERED_CUT_COPPER_STAIRS = Item('weathered_cut_copper_stairs')
EXPOSED_CUT_COPPER_STAIRS = Item('exposed_cut_copper_stairs')
CUT_COPPER_STAIRS = Item('cut_copper_stairs')
WAXED_WEATHERED_CUT_COPPER_STAIRS = Item('waxed_weathered_cut_copper_stairs')
WAXED_EXPOSED_CUT_COPPER_STAIRS = Item('waxed_exposed_cut_copper_stairs')
WAXED_CUT_COPPER_STAIRS = Item('waxed_cut_copper_stairs')
WAXED_OXIDIZED_CUT_COPPER_STAIRS = Item('waxed_oxidized_cut_copper_stairs')
COBBLED_DEEPSLATE_SLAB = Item('cobbled_deepslate_slab')
POLISHED_DEEPSLATE_SLAB = Item('polished_deepslate_slab')
DEEPSLATE_TILE_SLAB = Item('deepslate_tile_slab')
DEEPSLATE_BRICK_SLAB = Item('deepslate_brick_slab')
WAXED_WEATHERED_CUT_COPPER_SLAB = Item('waxed_weathered_cut_copper_slab')
WAXED_EXPOSED_CUT_COPPER_SLAB = Item('waxed_exposed_cut_copper_slab')
WAXED_CUT_COPPER_SLAB = Item('waxed_cut_copper_slab')
OXIDIZED_CUT_COPPER_SLAB = Item('oxidized_cut_copper_slab')
WEATHERED_CUT_COPPER_SLAB = Item('weathered_cut_copper_slab')
EXPOSED_CUT_COPPER_SLAB = Item('exposed_cut_copper_slab')
CUT_COPPER_SLAB = Item('cut_copper_slab')
WAXED_OXIDIZED_CUT_COPPER_SLAB = Item('waxed_oxidized_cut_copper_slab')
COBBLED_DEEPSLATE_STAIRS = Item('COBBLED_DEEPSLATE_STAIRS'.lower())
DEEPSLATE_TILE_WALL = Item('DEEPSLATE_TILE_WALL'.lower())
DEEPSLATE_BRICK_WALL = Item('DEEPSLATE_BRICK_WALL'.lower())
CUT_SANDSTONE_SLAB = Item('CUT_SANDSTONE_SLAB'.lower())
AZALEA_LEAVES = Item('AZALEA_LEAVES'.lower())
RAW_GOLD = Item('RAW_GOLD'.lower())
RAW_GOLD_BLOCK = Item('RAW_GOLD_BLOCK'.lower())
AZALEA = Item('AZALEA'.lower())
AIR = Item('air')
STONE = Item('stone')
GRANITE = Item('granite')
POLISHED_GRANITE = Item('polished_granite')
DIORITE = Item('diorite')
POLISHED_DIORITE = Item('polished_diorite')
ANDESITE = Item('andesite')
POLISHED_ANDESITE = Item('polished_andesite')
GRASS_BLOCK = Item('grass_block')
DIRT = Item('dirt')
COARSE_DIRT = Item('coarse_dirt')
PODZOL = Item('podzol')
CRIMSON_NYLIUM = Item('crimson_nylium')
WARPED_NYLIUM = Item('warped_nylium')
COBBLESTONE = Item('cobblestone')
OAK_PLANKS = Item('oak_planks')
SPRUCE_PLANKS = Item('spruce_planks')
BIRCH_PLANKS = Item('birch_planks')
JUNGLE_PLANKS = Item('jungle_planks')
ACACIA_PLANKS = Item('acacia_planks')
DARK_OAK_PLANKS = Item('dark_oak_planks')
CRIMSON_PLANKS = Item('crimson_planks')
WARPED_PLANKS = Item('warped_planks')
OAK_SAPLING = Item('oak_sapling')
SPRUCE_SAPLING = Item('spruce_sapling')
BIRCH_SAPLING = Item('birch_sapling')
JUNGLE_SAPLING = Item('jungle_sapling')
ACACIA_SAPLING = Item('acacia_sapling')
DARK_OAK_SAPLING = Item('dark_oak_sapling')
BEDROCK = Item('bedrock')
SAND = Item('sand')
RED_SAND = Item('red_sand')
GRAVEL = Item('gravel')
GOLD_ORE = Item('gold_ore')
IRON_ORE = Item('iron_ore')
COAL_ORE = Item('coal_ore')
NETHER_GOLD_ORE = Item('nether_gold_ore')
OAK_LOG = Item('oak_log')
SPRUCE_LOG = Item('spruce_log')
BIRCH_LOG = Item('birch_log')
JUNGLE_LOG = Item('jungle_log')
ACACIA_LOG = Item('acacia_log')
DARK_OAK_LOG = Item('dark_oak_log')
CRIMSON_STEM = Item('crimson_stem')
WARPED_STEM = Item('warped_stem')
STRIPPED_OAK_LOG = Item('stripped_oak_log')
STRIPPED_SPRUCE_LOG = Item('stripped_spruce_log')
STRIPPED_BIRCH_LOG = Item('stripped_birch_log')
STRIPPED_JUNGLE_LOG = Item('stripped_jungle_log')
STRIPPED_ACACIA_LOG = Item('stripped_acacia_log')
STRIPPED_DARK_OAK_LOG = Item('stripped_dark_oak_log')
STRIPPED_CRIMSON_STEM = Item('stripped_crimson_stem')
STRIPPED_WARPED_STEM = Item('stripped_warped_stem')
STRIPPED_OAK_WOOD = Item('stripped_oak_wood')
STRIPPED_SPRUCE_WOOD = Item('stripped_spruce_wood')
STRIPPED_BIRCH_WOOD = Item('stripped_birch_wood')
STRIPPED_JUNGLE_WOOD = Item('stripped_jungle_wood')
STRIPPED_ACACIA_WOOD = Item('stripped_acacia_wood')
STRIPPED_DARK_OAK_WOOD = Item('stripped_dark_oak_wood')
STRIPPED_CRIMSON_HYPHAE = Item('stripped_crimson_hyphae')
STRIPPED_WARPED_HYPHAE = Item('stripped_warped_hyphae')
OAK_WOOD = Item('oak_wood')
SPRUCE_WOOD = Item('spruce_wood')
BIRCH_WOOD = Item('birch_wood')
JUNGLE_WOOD = Item('jungle_wood')
ACACIA_WOOD = Item('acacia_wood')
DARK_OAK_WOOD = Item('dark_oak_wood')
CRIMSON_HYPHAE = Item('crimson_hyphae')
WARPED_HYPHAE = Item('warped_hyphae')
OAK_LEAVES = Item('oak_leaves')
SPRUCE_LEAVES = Item('spruce_leaves')
BIRCH_LEAVES = Item('birch_leaves')
JUNGLE_LEAVES = Item('jungle_leaves')
ACACIA_LEAVES = Item('acacia_leaves')
DARK_OAK_LEAVES = Item('dark_oak_leaves')
SPONGE = Item('sponge')
WET_SPONGE = Item('wet_sponge')
GLASS = Item('glass')
LAPIS_ORE = Item('lapis_ore')
LAPIS_BLOCK = Item('lapis_block')
DISPENSER = Item('dispenser')
SANDSTONE = Item('sandstone')
CHISELED_SANDSTONE = Item('chiseled_sandstone')
CUT_SANDSTONE = Item('cut_sandstone')
NOTE_BLOCK = Item('note_block')
POWERED_RAIL = Item('powered_rail')
DETECTOR_RAIL = Item('detector_rail')
STICKY_PISTON = Item('sticky_piston')
COBWEB = Item('cobweb')
GRASS = Item('grass')
FERN = Item('fern')
DEAD_BUSH = Item('dead_bush')
SEAGRASS = Item('seagrass')
SEA_PICKLE = Item('sea_pickle')
PISTON = Item('piston')
WHITE_WOOL = Item('white_wool')
ORANGE_WOOL = Item('orange_wool')
MAGENTA_WOOL = Item('magenta_wool')
LIGHT_BLUE_WOOL = Item('light_blue_wool')
YELLOW_WOOL = Item('yellow_wool')
LIME_WOOL = Item('lime_wool')
PINK_WOOL = Item('pink_wool')
GRAY_WOOL = Item('gray_wool')
LIGHT_GRAY_WOOL = Item('light_gray_wool')
CYAN_WOOL = Item('cyan_wool')
PURPLE_WOOL = Item('purple_wool')
BLUE_WOOL = Item('blue_wool')
BROWN_WOOL = Item('brown_wool')
GREEN_WOOL = Item('green_wool')
RED_WOOL = Item('red_wool')
BLACK_WOOL = Item('black_wool')
DANDELION = Item('dandelion')
POPPY = Item('poppy')
BLUE_ORCHID = Item('blue_orchid')
ALLIUM = Item('allium')
AZURE_BLUET = Item('azure_bluet')
RED_TULIP = Item('red_tulip')
ORANGE_TULIP = Item('orange_tulip')
WHITE_TULIP = Item('white_tulip')
PINK_TULIP = Item('pink_tulip')
OXEYE_DAISY = Item('oxeye_daisy')
CORNFLOWER = Item('cornflower')
LILY_OF_THE_VALLEY = Item('lily_of_the_valley')
WITHER_ROSE = Item('wither_rose')
BROWN_MUSHROOM = Item('brown_mushroom')
RED_MUSHROOM = Item('red_mushroom')
CRIMSON_FUNGUS = Item('crimson_fungus')
WARPED_FUNGUS = Item('warped_fungus')
CRIMSON_ROOTS = Item('crimson_roots')
WARPED_ROOTS = Item('warped_roots')
NETHER_SPROUTS = Item('nether_sprouts')
WEEPING_VINES = Item('weeping_vines')
TWISTING_VINES = Item('twisting_vines')
SUGAR_CANE = Item('sugar_cane')
KELP = Item('kelp')
BAMBOO = Item('bamboo')
GOLD_BLOCK = Item('gold_block')
IRON_BLOCK = Item('iron_block')
OAK_SLAB = Item('oak_slab')
SPRUCE_SLAB = Item('spruce_slab')
BIRCH_SLAB = Item('birch_slab')
JUNGLE_SLAB = Item('jungle_slab')
ACACIA_SLAB = Item('acacia_slab')
DARK_OAK_SLAB = Item('dark_oak_slab')
CRIMSON_SLAB = Item('crimson_slab')
WARPED_SLAB = Item('warped_slab')
STONE_SLAB = Item('stone_slab')
SMOOTH_STONE_SLAB = Item('smooth_stone_slab')
SANDSTONE_SLAB = Item('sandstone_slab')
CUT_STANDSTONE_SLAB = Item('cut_sandstone_slab')
PETRIFIED_OAK_SLAB = Item('petrified_oak_slab')
COBBLESTONE_SLAB = Item('cobblestone_slab')
BRICK_SLAB = Item('brick_slab')
STONE_BRICK_SLAB = Item('stone_brick_slab')
NETHER_BRICK_SLAB = Item('nether_brick_slab')
QUARTZ_SLAB = Item('quartz_slab')
RED_SANDSTONE_SLAB = Item('red_sandstone_slab')
CUT_RED_SANDSTONE_SLAB = Item('cut_red_sandstone_slab')
PURPUR_SLAB = Item('purpur_slab')
PRISMARINE_SLAB = Item('prismarine_slab')
PRISMARINE_BRICK_SLAB = Item('prismarine_brick_slab')
DARK_PRISMARINE_SLAB = Item('dark_prismarine_slab')
SMOOTH_QUARTZ = Item('smooth_quartz')
SMOOTH_RED_SANDSTONE = Item('smooth_red_sandstone')
SMOOTH_SANDSTONE = Item('smooth_sandstone')
SMOOTH_STONE = Item('smooth_stone')
BRICKS = Item('bricks')
TNT = Item('tnt')
BOOKSHELF = Item('bookshelf')
MOSSY_COBBLESTONE = Item('mossy_cobblestone')
OBSIDIAN = Item('obsidian')
TORCH = Item('torch')
END_ROD = Item('end_rod')
CHORUS_PLANT = Item('chorus_plant')
CHORUS_FLOWER = Item('chorus_flower')
PURPUR_BLOCK = Item('purpur_block')
PURPUR_PILLAR = Item('purpur_pillar')
PURPUR_STAIRS = Item('purpur_stairs')
SPAWNER = Item('spawner')
OAK_STAIRS = Item('oak_stairs')
CHEST = Item('chest')
DIAMOND_ORE = Item('diamond_ore')
DIAMOND_BLOCK = Item('diamond_block')
CRAFTING_TABLE = Item('crafting_table')
FARMLAND = Item('farmland')
FURNACE = Item('furnace')
LADDER = Item('ladder')
RAIL = Item('rail')
COBBLESTONE_STAIRS = Item('cobblestone_stairs')
LEVER = Item('lever')
STONE_PRESSURE_PLATE = Item('stone_pressure_plate')
OAK_PRESSURE_PLATE = Item('oak_pressure_plate')
SPRUCE_PRESSURE_PLATE = Item('spruce_pressure_plate')
BIRCH_PRESSURE_PLATE = Item('birch_pressure_plate')
JUNGLE_PRESSURE_PLATE = Item('jungle_pressure_plate')
ACACIA_PRESSURE_PLATE = Item('acacia_pressure_plate')
DARK_OAK_PRESSURE_PLATE = Item('dark_oak_pressure_plate')
CRIMSON_PRESSURE_PLATE = Item('crimson_pressure_plate')
WARPED_PRESSURE_PLATE = Item('warped_pressure_plate')
POLISHED_BLACKSTONE_PRESSURE_PLATE = Item('polished_blackstone_pressure_plate')
REDSTONE_ORE = Item('redstone_ore')
REDSTONE_TORCH = Item('redstone_torch')
SNOW = Item('snow')
ICE = Item('ice')
SNOW_BLOCK = Item('snow_block')
CACTUS = Item('cactus')
CLAY = Item('clay')
JUKEBOX = Item('jukebox')
OAK_FENCE = Item('oak_fence')
SPRUCE_FENCE = Item('spruce_fence')
BIRCH_FENCE = Item('birch_fence')
JUNGLE_FENCE = Item('jungle_fence')
ACACIA_FENCE = Item('acacia_fence')
DARK_OAK_FENCE = Item('dark_oak_fence')
CRIMSON_FENCE = Item('crimson_fence')
WARPED_FENCE = Item('warped_fence')
PUMPKIN = Item('pumpkin')
CARVED_PUMPKIN = Item('carved_pumpkin')
NETHERRACK = Item('netherrack')
SOUL_SAND = Item('soul_sand')
SOUL_SOIL = Item('soul_soil')
BASALT = Item('basalt')
POLISHED_BASALT = Item('polished_basalt')
SOUL_TORCH = Item('soul_torch')
GLOWSTONE = Item('glowstone')
JACK_O_LANTERN = Item('jack_o_lantern')
OAK_TRAPDOOR = Item('oak_trapdoor')
SPRUCE_TRAPDOOR = Item('spruce_trapdoor')
BIRCH_TRAPDOOR = Item('birch_trapdoor')
JUNGLE_TRAPDOOR = Item('jungle_trapdoor')
ACACIA_TRAPDOOR = Item('acacia_trapdoor')
DARK_OAK_TRAPDOOR = Item('dark_oak_trapdoor')
CRIMSON_TRAPDOOR = Item('crimson_trapdoor')
WARPED_TRAPDOOR = Item('warped_trapdoor')
INFESTED_STONE = Item('infested_stone')
INFESTED_COBBLESTONE = Item('infested_cobblestone')
INFESTED_STONE_BRICKS = Item('infested_stone_bricks')
INFESTED_MOSSY_STONE_BRICKS = Item('infested_mossy_stone_bricks')
INFESTED_CRACKED_STONE_BRICKS = Item('infested_cracked_stone_bricks')
INFESTED_CHISELED_STONE_BRICKS = Item('infested_chiseled_stone_bricks')
STONE_BRICKS = Item('stone_bricks')
MOSSY_STONE_BRICKS = Item('mossy_stone_bricks')
CRACKED_STONE_BRICKS = Item('cracked_stone_bricks')
CHISELED_STONE_BRICKS = Item('chiseled_stone_bricks')
BROWN_MUSHROOM_BLOCK = Item('brown_mushroom_block')
RED_MUSHROOM_BLOCK = Item('red_mushroom_block')
MUSHROOM_STEM = Item('mushroom_stem')
IRON_BARS = Item('iron_bars')
CHAIN = Item('chain')
GLASS_PANE = Item('glass_pane')
MELON = Item('melon')
VINE = Item('vine')
OAK_FENCE_GATE = Item('oak_fence_gate')
SPRUCE_FENCE_GATE = Item('spruce_fence_gate')
BIRCH_FENCE_GATE = Item('birch_fence_gate')
JUNGLE_FENCE_GATE = Item('jungle_fence_gate')
ACACIA_FENCE_GATE = Item('acacia_fence_gate')
DARK_OAK_FENCE_GATE = Item('dark_oak_fence_gate')
CRIMSON_FENCE_GATE = Item('crimson_fence_gate')
WARPED_FENCE_GATE = Item('warped_fence_gate')
BRICK_STAIRS = Item('brick_stairs')
STONE_BRICK_STAIRS = Item('stone_brick_stairs')
MYCELIUM = Item('mycelium')
LILY_PAD = Item('lily_pad')
NETHER_BRICKS = Item('nether_bricks')
CRACKED_NETHER_BRICKS = Item('cracked_nether_bricks')
CHISELED_NETHER_BRICKS = Item('chiseled_nether_bricks')
NETHER_BRICK_FENCE = Item('nether_brick_fence')
NETHER_BRICK_STAIRS = Item('nether_brick_stairs')
ENCHANTING_TABLE = Item('enchanting_table')
END_PORTAL_FRAME = Item('end_portal_frame')
END_STONE = Item('end_stone')
END_STONE_BRICKS = Item('end_stone_bricks')
DRAGON_EGG = Item('dragon_egg')
REDSTONE_LAMP = Item('redstone_lamp')
SANDSTONE_STAIRS = Item('sandstone_stairs')
EMERALD_ORE = Item('emerald_ore')
ENDER_CHEST = Item('ender_chest')
TRIPWIRE_HOOK = Item('tripwire_hook')
EMERALD_BLOCK = Item('emerald_block')
SPRUCE_STAIRS = Item('spruce_stairs')
BIRCH_STAIRS = Item('birch_stairs')
JUNGLE_STAIRS = Item('jungle_stairs')
CRIMSON_STAIRS = Item('crimson_stairs')
WARPED_STAIRS = Item('warped_stairs')
COMMAND_BLOCK = Item('command_block')
BEACON = Item('beacon')
COBBLESTONE_WALL = Item('cobblestone_wall')
MOSSY_COBBLESTONE_WALL = Item('mossy_cobblestone_wall')
BRICK_WALL = Item('brick_wall')
PRISMARINE_WALL = Item('prismarine_wall')
RED_SANDSTONE_WALL = Item('red_sandstone_wall')
MOSSY_STONE_BRICK_WALL = Item('mossy_stone_brick_wall')
GRANITE_WALL = Item('granite_wall')
STONE_BRICK_WALL = Item('stone_brick_wall')
NETHER_BRICK_WALL = Item('nether_brick_wall')
ANDESITE_WALL = Item('andesite_wall')
RED_NETHER_BRICK_WALL = Item('red_nether_brick_wall')
SANDSTONE_WALL = Item('sandstone_wall')
END_STONE_BRICK_WALL = Item('end_stone_brick_wall')
DIORITE_WALL = Item('diorite_wall')
BLACKSTONE_WALL = Item('blackstone_wall')
POLISHED_BLACKSTONE_WALL = Item('polished_blackstone_wall')
POLISHED_BLACKSTONE_BRICK_WALL = Item('polished_blackstone_brick_wall')
STONE_BUTTON = Item('stone_button')
OAK_BUTTON = Item('oak_button')
SPRUCE_BUTTON = Item('spruce_button')
BIRCH_BUTTON = Item('birch_button')
JUNGLE_BUTTON = Item('jungle_button')
ACACIA_BUTTON = Item('acacia_button')
DARK_OAK_BUTTON = Item('dark_oak_button')
CRIMSON_BUTTON = Item('crimson_button')
WARPED_BUTTON = Item('warped_button')
POLISHED_BLACKSTONE_BUTTON = Item('polished_blackstone_button')
ANVIL = Item('anvil')
CHIPPED_ANVIL = Item('chipped_anvil')
DAMAGED_ANVIL = Item('damaged_anvil')
TRAPPED_CHEST = Item('trapped_chest')
LIGHT_WEIGHTED_PRESSURE_PLATE = Item('light_weighted_pressure_plate')
HEAVY_WEIGHTED_PRESSURE_PLATE = Item('heavy_weighted_pressure_plate')
DAYLIGHT_DETECTOR = Item('daylight_detector')
REDSTONE_BLOCK = Item('redstone_block')
NETHER_QUARTZ_ORE = Item('nether_quartz_ore')
HOPPER = Item('hopper')
CHISELED_QUARTZ_BLOCK = Item('chiseled_quartz_block')
QUARTZ_BLOCK = Item('quartz_block')
QUARTZ_BRICKS = Item('quartz_bricks')
QUARTZ_PILLAR = Item('quartz_pillar')
QUARTZ_STAIRS = Item('quartz_stairs')
ACTIVATOR_RAIL = Item('activator_rail')
DROPPER = Item('dropper')
WHITE_TERRACOTTA = Item('white_terracotta')
ORANGE_TERRACOTTA = Item('orange_terracotta')
MAGENTA_TERRACOTTA = Item('magenta_terracotta')
LIGHT_BLUE_TERRACOTTA = Item('light_blue_terracotta')
YELLOW_TERRACOTTA = Item('yellow_terracotta')
LIME_TERRACOTTA = Item('lime_terracotta')
PINK_TERRACOTTA = Item('pink_terracotta')
GRAY_TERRACOTTA = Item('gray_terracotta')
LIGHT_GRAY_TERRACOTTA = Item('light_gray_terracotta')
CYAN_TERRACOTTA = Item('cyan_terracotta')
PURPLE_TERRACOTTA = Item('purple_terracotta')
BLUE_TERRACOTTA = Item('blue_terracotta')
BROWN_TERRACOTTA = Item('brown_terracotta')
GREEN_TERRACOTTA = Item('green_terracotta')
RED_TERRACOTTA = Item('red_terracotta')
BLACK_TERRACOTTA = Item('black_terracotta')
BARRIER = Item('barrier')
IRON_TRAPDOOR = Item('iron_trapdoor')
HAY_BLOCK = Item('hay_block')
WHITE_CARPET = Item('white_carpet')
ORANGE_CARPET = Item('orange_carpet')
MAGENTA_CARPET = Item('magenta_carpet')
LIGHT_BLUE_CARPET = Item('light_blue_carpet')
YELLOW_CARPET = Item('yellow_carpet')
LIME_CARPET = Item('lime_carpet')
PINK_CARPET = Item('pink_carpet')
GRAY_CARPET = Item('gray_carpet')
LIGHT_GRAY_CARPET = Item('light_gray_carpet')
CYAN_CARPET = Item('cyan_carpet')
PURPLE_CARPET = Item('purple_carpet')
BLUE_CARPET = Item('blue_carpet')
BROWN_CARPET = Item('brown_carpet')
GREEN_CARPET = Item('green_carpet')
RED_CARPET = Item('red_carpet')
BLACK_CARPET = Item('black_carpet')
TERRACOTTA = Item('terracotta')
COAL_BLOCK = Item('coal_block')
PACKED_ICE = Item('packed_ice')
ACACIA_STAIRS = Item('acacia_stairs')
DARK_OAK_STAIRS = Item('dark_oak_stairs')
SLIME_BLOCK = Item('slime_block')
GRASS_PATH = Item('grass_path')
SUNFLOWER = Item('sunflower')
LILAC = Item('lilac')
ROSE_BUSH = Item('rose_bush')
PEONY = Item('peony')
TALL_GRASS = Item('tall_grass')
LARGE_FERN = Item('large_fern')
WHITE_STAINED_GLASS = Item('white_stained_glass')
ORANGE_STAINED_GLASS = Item('orange_stained_glass')
MAGENTA_STAINED_GLASS = Item('magenta_stained_glass')
LIGHT_BLUE_STAINED_GLASS = Item('light_blue_stained_glass')
YELLOW_STAINED_GLASS = Item('yellow_stained_glass')
LIME_STAINED_GLASS = Item('lime_stained_glass')
PINK_STAINED_GLASS = Item('pink_stained_glass')
GRAY_STAINED_GLASS = Item('gray_stained_glass')
LIGHT_GRAY_STAINED_GLASS = Item('light_gray_stained_glass')
CYAN_STAINED_GLASS = Item('cyan_stained_glass')
PURPLE_STAINED_GLASS = Item('purple_stained_glass')
BLUE_STAINED_GLASS = Item('blue_stained_glass')
BROWN_STAINED_GLASS = Item('brown_stained_glass')
GREEN_STAINED_GLASS = Item('green_stained_glass')
RED_STAINED_GLASS = Item('red_stained_glass')
BLACK_STAINED_GLASS = Item('black_stained_glass')
WHITE_STAINED_GLASS_PANE = Item('white_stained_glass_pane')
ORANGE_STAINED_GLASS_PANE = Item('orange_stained_glass_pane')
MAGENTA_STAINED_GLASS_PANE = Item('magenta_stained_glass_pane')
LIGHT_BLUE_STAINED_GLASS_PANE = Item('light_blue_stained_glass_pane')
YELLOW_STAINED_GLASS_PANE = Item('yellow_stained_glass_pane')
LIME_STAINED_GLASS_PANE = Item('lime_stained_glass_pane')
PINK_STAINED_GLASS_PANE = Item('pink_stained_glass_pane')
GRAY_STAINED_GLASS_PANE = Item('gray_stained_glass_pane')
LIGHT_GRAY_STAINED_GLASS_PANE = Item('light_gray_stained_glass_pane')
CYAN_STAINED_GLASS_PANE = Item('cyan_stained_glass_pane')
PURPLE_STAINED_GLASS_PANE = Item('purple_stained_glass_pane')
BLUE_STAINED_GLASS_PANE = Item('blue_stained_glass_pane')
BROWN_STAINED_GLASS_PANE = Item('brown_stained_glass_pane')
GREEN_STAINED_GLASS_PANE = Item('green_stained_glass_pane')
RED_STAINED_GLASS_PANE = Item('red_stained_glass_pane')
BLACK_STAINED_GLASS_PANE = Item('black_stained_glass_pane')
PRISMARINE = Item('prismarine')
PRISMARINE_BRICKS = Item('prismarine_bricks')
DARK_PRISMARINE = Item('dark_prismarine')
PRISMARINE_STAIRS = Item('prismarine_stairs')
PRISMARINE_BRICK_STAIRS = Item('prismarine_brick_stairs')
DARK_PRISMARINE_STAIRS = Item('dark_prismarine_stairs')
SEA_LANTERN = Item('sea_lantern')
RED_SANDSTONE = Item('red_sandstone')
CHISELED_RED_SANDSTONE = Item('chiseled_red_sandstone')
CUT_RED_SANDSTONE = Item('cut_red_sandstone')
RED_SANDSTONE_STAIRS = Item('red_sandstone_stairs')
REPEATING_COMMAND_BLOCK = Item('repeating_command_block')
CHAIN_COMMAND_BLOCK = Item('chain_command_block')
MAGMA_BLOCK = Item('magma_block')
NETHER_WART_BLOCK = Item('nether_wart_block')
WARPED_WART_BLOCK = Item('warped_wart_block')
RED_NETHER_BRICKS = Item('red_nether_bricks')
BONE_BLOCK = Item('bone_block')
STRUCTURE_VOID = Item('structure_void')
OBSERVER = Item('observer')
SHULKER_BOX = Item('shulker_box')
WHITE_SHULKER_BOX = Item('white_shulker_box')
ORANGE_SHULKER_BOX = Item('orange_shulker_box')
MAGENTA_SHULKER_BOX = Item('magenta_shulker_box')
LIGHT_BLUE_SHULKER_BOX = Item('light_blue_shulker_box')
YELLOW_SHULKER_BOX = Item('yellow_shulker_box')
LIME_SHULKER_BOX = Item('lime_shulker_box')
PINK_SHULKER_BOX = Item('pink_shulker_box')
GRAY_SHULKER_BOX = Item('gray_shulker_box')
LIGHT_GRAY_SHULKER_BOX = Item('light_gray_shulker_box')
CYAN_SHULKER_BOX = Item('cyan_shulker_box')
PURPLE_SHULKER_BOX = Item('purple_shulker_box')
BLUE_SHULKER_BOX = Item('blue_shulker_box')
BROWN_SHULKER_BOX = Item('brown_shulker_box')
GREEN_SHULKER_BOX = Item('green_shulker_box')
RED_SHULKER_BOX = Item('red_shulker_box')
BLACK_SHULKER_BOX = Item('black_shulker_box')
WHITE_GLAZED_TERRACOTTA = Item('white_glazed_terracotta')
ORANGE_GLAZED_TERRACOTTA = Item('orange_glazed_terracotta')
MAGENTA_GLAZED_TERRACOTTA = Item('magenta_glazed_terracotta')
LIGHT_BLUE_GLAZED_TERRACOTTA = Item('light_blue_glazed_terracotta')
YELLOW_GLAZED_TERRACOTTA = Item('yellow_glazed_terracotta')
LIME_GLAZED_TERRACOTTA = Item('lime_glazed_terracotta')
PINK_GLAZED_TERRACOTTA = Item('pink_glazed_terracotta')
GRAY_GLAZED_TERRACOTTA = Item('gray_glazed_terracotta')
LIGHT_GRAY_GLAZED_TERRACOTTA = Item('light_gray_glazed_terracotta')
CYAN_GLAZED_TERRACOTTA = Item('cyan_glazed_terracotta')
PURPLE_GLAZED_TERRACOTTA = Item('purple_glazed_terracotta')
BLUE_GLAZED_TERRACOTTA = Item('blue_glazed_terracotta')
BROWN_GLAZED_TERRACOTTA = Item('brown_glazed_terracotta')
GREEN_GLAZED_TERRACOTTA = Item('green_glazed_terracotta')
RED_GLAZED_TERRACOTTA = Item('red_glazed_terracotta')
BLACK_GLAZED_TERRACOTTA = Item('black_glazed_terracotta')
WHITE_CONCRETE = Item('white_concrete')
ORANGE_CONCRETE = Item('orange_concrete')
MAGENTA_CONCRETE = Item('magenta_concrete')
LIGHT_BLUE_CONCRETE = Item('light_blue_concrete')
YELLOW_CONCRETE = Item('yellow_concrete')
LIME_CONCRETE = Item('lime_concrete')
PINK_CONCRETE = Item('pink_concrete')
GRAY_CONCRETE = Item('gray_concrete')
LIGHT_GRAY_CONCRETE = Item('light_gray_concrete')
CYAN_CONCRETE = Item('cyan_concrete')
PURPLE_CONCRETE = Item('purple_concrete')
BLUE_CONCRETE = Item('blue_concrete')
BROWN_CONCRETE = Item('brown_concrete')
GREEN_CONCRETE = Item('green_concrete')
RED_CONCRETE = Item('red_concrete')
BLACK_CONCRETE = Item('black_concrete')
WHITE_CONCRETE_POWDER = Item('white_concrete_powder')
ORANGE_CONCRETE_POWDER = Item('orange_concrete_powder')
MAGENTA_CONCRETE_POWDER = Item('magenta_concrete_powder')
LIGHT_BLUE_CONCRETE_POWDER = Item('light_blue_concrete_powder')
YELLOW_CONCRETE_POWDER = Item('yellow_concrete_powder')
LIME_CONCRETE_POWDER = Item('lime_concrete_powder')
PINK_CONCRETE_POWDER = Item('pink_concrete_powder')
GRAY_CONCRETE_POWDER = Item('gray_concrete_powder')
LIGHT_GRAY_CONCRETE_POWDER = Item('light_gray_concrete_powder')
CYAN_CONCRETE_POWDER = Item('cyan_concrete_powder')
PURPLE_CONCRETE_POWDER = Item('purple_concrete_powder')
BLUE_CONCRETE_POWDER = Item('blue_concrete_powder')
BROWN_CONCRETE_POWDER = Item('brown_concrete_powder')
GREEN_CONCRETE_POWDER = Item('green_concrete_powder')
RED_CONCRETE_POWDER = Item('red_concrete_powder')
BLACK_CONCRETE_POWDER = Item('black_concrete_powder')
TURTLE_EGG = Item('turtle_egg')
DEAD_TUBE_CORAL_BLOCK = Item('dead_tube_coral_block')
DEAD_BRAIN_CORAL_BLOCK = Item('dead_brain_coral_block')
DEAD_BUBBLE_CORAL_BLOCK = Item('dead_bubble_coral_block')
DEAD_FIRE_CORAL_BLOCK = Item('dead_fire_coral_block')
DEAD_HORN_CORAL_BLOCK = Item('dead_horn_coral_block')
TUBE_CORAL_BLOCK = Item('tube_coral_block')
BRAIN_CORAL_BLOCK = Item('brain_coral_block')
BUBBLE_CORAL_BLOCK = Item('bubble_coral_block')
FIRE_CORAL_BLOCK = Item('fire_coral_block')
HORN_CORAL_BLOCK = Item('horn_coral_block')
TUBE_CORAL = Item('tube_coral')
BRAIN_CORAL = Item('brain_coral')
BUBBLE_CORAL = Item('bubble_coral')
FIRE_CORAL = Item('fire_coral')
HORN_CORAL = Item('horn_coral')
DEAD_BRAIN_CORAL = Item('dead_brain_coral')
DEAD_BUBBLE_CORAL = Item('dead_bubble_coral')
DEAD_FIRE_CORAL = Item('dead_fire_coral')
DEAD_HORN_CORAL = Item('dead_horn_coral')
DEAD_TUBE_CORAL = Item('dead_tube_coral')
TUBE_CORAL_FAN = Item('tube_coral_fan')
BRAIN_CORAL_FAN = Item('brain_coral_fan')
BUBBLE_CORAL_FAN = Item('bubble_coral_fan')
FIRE_CORAL_FAN = Item('fire_coral_fan')
HORN_CORAL_FAN = Item('horn_coral_fan')
DEAD_TUBE_CORAL_FAN = Item('dead_tube_coral_fan')
DEAD_BRAIN_CORAL_FAN = Item('dead_brain_coral_fan')
DEAD_BUBBLE_CORAL_FAN = Item('dead_bubble_coral_fan')
DEAD_FIRE_CORAL_FAN = Item('dead_fire_coral_fan')
DEAD_HORN_CORAL_FAN = Item('dead_horn_coral_fan')
BLUE_ICE = Item('blue_ice')
CONDUIT = Item('conduit')
POLISHED_GRANITE_STAIRS = Item('polished_granite_stairs')
SMOOTH_RED_SANDSTONE_STAIRS = Item('smooth_red_sandstone_stairs')
MOSSY_STONE_BRICK_STAIRS = Item('mossy_stone_brick_stairs')
POLISHED_DIORITE_STAIRS = Item('polished_diorite_stairs')
MOSSY_COBBLESTONE_STAIRS = Item('mossy_cobblestone_stairs')
END_STONE_BRICK_STAIRS = Item('end_stone_brick_stairs')
STONE_STAIRS = Item('stone_stairs')
SMOOTH_SANDSTONE_STAIRS = Item('smooth_sandstone_stairs')
SMOOTH_QUARTZ_STAIRS = Item('smooth_quartz_stairs')
GRANITE_STAIRS = Item('granite_stairs')
ANDESITE_STAIRS = Item('andesite_stairs')
RED_NETHER_BRICK_STAIRS = Item('red_nether_brick_stairs')
POLISHED_ANDESITE_STAIRS = Item('polished_andesite_stairs')
DIORITE_STAIRS = Item('diorite_stairs')
POLISHED_GRANITE_SLAB = Item('polished_granite_slab')
SMOOTH_RED_SANDSTONE_SLAB = Item('smooth_red_sandstone_slab')
MOSSY_STONE_BRICK_SLAB = Item('mossy_stone_brick_slab')
POLISHED_DIORITE_SLAB = Item('polished_diorite_slab')
MOSSY_COBBLESTONE_SLAB = Item('mossy_cobblestone_slab')
END_STONE_BRICK_SLAB = Item('end_stone_brick_slab')
SMOOTH_SANDSTONE_SLAB = Item('smooth_sandstone_slab')
SMOOTH_QUARTZ_SLAB = Item('smooth_quartz_slab')
GRANITE_SLAB = Item('granite_slab')
ANDESITE_SLAB = Item('andesite_slab')
RED_NETHER_BRICK_SLAB = Item('red_nether_brick_slab')
POLISHED_ANDESITE_SLAB = Item('polished_andesite_slab')
DIORITE_SLAB = Item('diorite_slab')
SCAFFOLDING = Item('scaffolding')
IRON_DOOR = Item('iron_door')
OAK_DOOR = Item('oak_door')
SPRUCE_DOOR = Item('spruce_door')
BIRCH_DOOR = Item('birch_door')
JUNGLE_DOOR = Item('jungle_door')
ACACIA_DOOR = Item('acacia_door')
DARK_OAK_DOOR = Item('dark_oak_door')
CRIMSON_DOOR = Item('crimson_door')
WARPED_DOOR = Item('warped_door')
REPEATER = Item('repeater')
COMPARATOR = Item('comparator')
STRUCTURE_BLOCK = Item('structure_block')
JIGSAW = Item('jigsaw')
TURTLE_HELMET = Item('turtle_helmet')
SCUTE = Item('scute')
FLINT_AND_STEEL = Item('flint_and_steel')
APPLE = Item('apple')
BOW = Item('bow')
ARROW = Item('arrow')
COAL = Item('coal')
CHARCOAL = Item('charcoal')
DIAMOND = Item('diamond')
IRON_INGOT = Item('iron_ingot')
GOLD_INGOT = Item('gold_ingot')
NETHERITE_INGOT = Item('netherite_ingot')
NETHERITE_SCRAP = Item('netherite_scrap')
WOODEN_SWORD = Item('wooden_sword')
WOODEN_SHOVEL = Item('wooden_shovel')
WOODEN_PICKAXE = Item('wooden_pickaxe')
WOODEN_AXE = Item('wooden_axe')
WOODEN_HOE = Item('wooden_hoe')
STONE_SWORD = Item('stone_sword')
STONE_SHOVEL = Item('stone_shovel')
STONE_PICKAXE = Item('stone_pickaxe')
STONE_AXE = Item('stone_axe')
STONE_HOE = Item('stone_hoe')
GOLDEN_SWORD = Item('golden_sword')
GOLDEN_SHOVEL = Item('golden_shovel')
GOLDEN_PICKAXE = Item('golden_pickaxe')
GOLDEN_AXE = Item('golden_axe')
GOLDEN_HOE = Item('golden_hoe')
IRON_SWORD = Item('iron_sword')
IRON_SHOVEL = Item('iron_shovel')
IRON_PICKAXE = Item('iron_pickaxe')
IRON_AXE = Item('iron_axe')
IRON_HOE = Item('iron_hoe')
DIAMOND_SWORD = Item('diamond_sword')
DIAMOND_SHOVEL = Item('diamond_shovel')
DIAMOND_PICKAXE = Item('diamond_pickaxe')
DIAMOND_AXE = Item('diamond_axe')
DIAMOND_HOE = Item('diamond_hoe')
NETHERITE_SWORD = Item('netherite_sword')
NETHERITE_SHOVEL = Item('netherite_shovel')
NETHERITE_PICKAXE = Item('netherite_pickaxe')
NETHERITE_AXE = Item('netherite_axe')
NETHERITE_HOE = Item('netherite_hoe')
STICK = Item('stick')
BOWL = Item('bowl')
MUSHROOM_STEW = Item('mushroom_stew')
STRING = Item('string')
FEATHER = Item('feather')
GUNPOWDER = Item('gunpowder')
WHEAT_SEEDS = Item('wheat_seeds')
WHEAT = Item('wheat')
BREAD = Item('bread')
LEATHER_HELMET = Item('leather_helmet')
LEATHER_CHESTPLATE = Item('leather_chestplate')
LEATHER_LEGGINGS = Item('leather_leggings')
LEATHER_BOOTS = Item('leather_boots')
CHAINMAIL_HELMET = Item('chainmail_helmet')
CHAINMAIL_CHESTPLATE = Item('chainmail_chestplate')
CHAINMAIL_LEGGINGS = Item('chainmail_leggings')
CHAINMAIL_BOOTS = Item('chainmail_boots')
IRON_HELMET = Item('iron_helmet')
IRON_CHESTPLATE = Item('iron_chestplate')
IRON_LEGGINGS = Item('iron_leggings')
IRON_BOOTS = Item('iron_boots')
DIAMOND_HELMET = Item('diamond_helmet')
DIAMOND_CHESTPLATE = Item('diamond_chestplate')
DIAMOND_LEGGINGS = Item('diamond_leggings')
DIAMOND_BOOTS = Item('diamond_boots')
GOLDEN_HELMET = Item('golden_helmet')
GOLDEN_CHESTPLATE = Item('golden_chestplate')
GOLDEN_LEGGINGS = Item('golden_leggings')
GOLDEN_BOOTS = Item('golden_boots')
NETHERITE_HELMET = Item('netherite_helmet')
NETHERITE_CHESTPLATE = Item('netherite_chestplate')
NETHERITE_LEGGINGS = Item('netherite_leggings')
NETHERITE_BOOTS = Item('netherite_boots')
FLINT = Item('flint')
PORKCHOP = Item('porkchop')
COOKED_PORKCHOP = Item('cooked_porkchop')
PAINTING = Item('painting')
GOLDEN_APPLE = Item('golden_apple')
ENCHANTED_GOLDEN_APPLE = Item('enchanted_golden_apple')
OAK_SIGN = Item('oak_sign')
SPRUCE_SIGN = Item('spruce_sign')
BIRCH_SIGN = Item('birch_sign')
JUNGLE_SIGN = Item('jungle_sign')
ACACIA_SIGN = Item('acacia_sign')
DARK_OAK_SIGN = Item('dark_oak_sign')
CRIMSON_SIGN = Item('crimson_sign')
WARPED_SIGN = Item('warped_sign')
BUCKET = Item('bucket')
WATER_BUCKET = Item('water_bucket')
LAVA_BUCKET = Item('lava_bucket')
MINECART = Item('minecart')
SADDLE = Item('saddle')
REDSTONE = Item('redstone')
SNOWBALL = Item('snowball')
OAK_BOAT = Item('oak_boat')
LEATHER = Item('leather')
MILK_BUCKET = Item('milk_bucket')
PUFFERFISH_BUCKET = Item('pufferfish_bucket')
SALMON_BUCKET = Item('salmon_bucket')
COD_BUCKET = Item('cod_bucket')
TROPICAL_FISH_BUCKET = Item('tropical_fish_bucket')
BRICK = Item('brick')
CLAY_BALL = Item('clay_ball')
DRIED_KELP_BLOCK = Item('dried_kelp_block')
PAPER = Item('paper')
BOOK = Item('book')
SLIME_BALL = Item('slime_ball')
CHEST_MINECART = Item('chest_minecart')
FURNACE_MINECART = Item('furnace_minecart')
EGG = Item('egg')
COMPASS = Item('compass')
FISHING_ROD = Item('fishing_rod')
CLOCK = Item('clock')
GLOWSTONE_DUST = Item('glowstone_dust')
COD = Item('cod')
SALMON = Item('salmon')
TROPICAL_FISH = Item('tropical_fish')
PUFFERFISH = Item('pufferfish')
COOKED_COD = Item('cooked_cod')
COOKED_SALMON = Item('cooked_salmon')
INK_SAC = Item('ink_sac')
COCOA_BEANS = Item('cocoa_beans')
LAPIS_LAZULI = Item('lapis_lazuli')
WHITE_DYE = Item('white_dye')
ORANGE_DYE = Item('orange_dye')
MAGENTA_DYE = Item('magenta_dye')
LIGHT_BLUE_DYE = Item('light_blue_dye')
YELLOW_DYE = Item('yellow_dye')
LIME_DYE = Item('lime_dye')
PINK_DYE = Item('pink_dye')
GRAY_DYE = Item('gray_dye')
LIGHT_GRAY_DYE = Item('light_gray_dye')
CYAN_DYE = Item('cyan_dye')
PURPLE_DYE = Item('purple_dye')
BLUE_DYE = Item('blue_dye')
BROWN_DYE = Item('brown_dye')
GREEN_DYE = Item('green_dye')
RED_DYE = Item('red_dye')
BLACK_DYE = Item('black_dye')
BONE_MEAL = Item('bone_meal')
BONE = Item('bone')
SUGAR = Item('sugar')
CAKE = Item('cake')
WHITE_BED = Item('white_bed')
ORANGE_BED = Item('orange_bed')
MAGENTA_BED = Item('magenta_bed')
LIGHT_BLUE_BED = Item('light_blue_bed')
YELLOW_BED = Item('yellow_bed')
LIME_BED = Item('lime_bed')
PINK_BED = Item('pink_bed')
GRAY_BED = Item('gray_bed')
LIGHT_GRAY_BED = Item('light_gray_bed')
CYAN_BED = Item('cyan_bed')
PURPLE_BED = Item('purple_bed')
BLUE_BED = Item('blue_bed')
BROWN_BED = Item('brown_bed')
GREEN_BED = Item('green_bed')
RED_BED = Item('red_bed')
BLACK_BED = Item('black_bed')
COOKIE = Item('cookie')
FILLED_MAP = Item('filled_map')
SHEARS = Item('shears')
MELON_SLICE = Item('melon_slice')
DRIED_KELP = Item('dried_kelp')
PUMPKIN_SEEDS = Item('pumpkin_seeds')
MELON_SEEDS = Item('melon_seeds')
BEEF = Item('beef')
COOKED_BEEF = Item('cooked_beef')
CHICKEN = Item('chicken')
COOKED_CHICKEN = Item('cooked_chicken')
ROTTEN_FLESH = Item('rotten_flesh')
ENDER_PEARL = Item('ender_pearl')
BLAZE_ROD = Item('blaze_rod')
GHAST_TEAR = Item('ghast_tear')
GOLD_NUGGET = Item('gold_nugget')
NETHER_WART = Item('nether_wart')
POTION = Item('potion')
GLASS_BOTTLE = Item('glass_bottle')
SPIDER_EYE = Item('spider_eye')
FERMENTED_SPIDER_EYE = Item('fermented_spider_eye')
BLAZE_POWDER = Item('blaze_powder')
MAGMA_CREAM = Item('magma_cream')
BREWING_STAND = Item('brewing_stand')
CAULDRON = Item('cauldron')
ENDER_EYE = Item('ender_eye')
GLISTERING_MELON_SLICE = Item('glistering_melon_slice')
BAT_SPAWN_EGG = Item('bat_spawn_egg')
BEE_SPAWN_EGG = Item('bee_spawn_egg')
BLAZE_SPAWN_EGG = Item('blaze_spawn_egg')
CAT_SPAWN_EGG = Item('cat_spawn_egg')
CAVE_SPIDER_SPAWN_EGG = Item('cave_spider_spawn_egg')
CHICKEN_SPAWN_EGG = Item('chicken_spawn_egg')
COD_SPAWN_EGG = Item('cod_spawn_egg')
COW_SPAWN_EGG = Item('cow_spawn_egg')
CREEPER_SPAWN_EGG = Item('creeper_spawn_egg')
DOLPHIN_SPAWN_EGG = Item('dolphin_spawn_egg')
DONKEY_SPAWN_EGG = Item('donkey_spawn_egg')
DROWNED_SPAWN_EGG = Item('drowned_spawn_egg')
ELDER_GUARDIAN_SPAWN_EGG = Item('elder_guardian_spawn_egg')
ENDERMAN_SPAWN_EGG = Item('enderman_spawn_egg')
ENDERMITE_SPAWN_EGG = Item('endermite_spawn_egg')
EVOKER_SPAWN_EGG = Item('evoker_spawn_egg')
FOX_SPAWN_EGG = Item('fox_spawn_egg')
GHAST_SPAWN_EGG = Item('ghast_spawn_egg')
GUARDIAN_SPAWN_EGG = Item('guardian_spawn_egg')
HOGLIN_SPAWN_EGG = Item('hoglin_spawn_egg')
HORSE_SPAWN_EGG = Item('horse_spawn_egg')
HUSK_SPAWN_EGG = Item('husk_spawn_egg')
LLAMA_SPAWN_EGG = Item('llama_spawn_egg')
MAGMA_CUBE_SPAWN_EGG = Item('magma_cube_spawn_egg')
MOOSHROOM_SPAWN_EGG = Item('mooshroom_spawn_egg')
MULE_SPAWN_EGG = Item('mule_spawn_egg')
OCELOT_SPAWN_EGG = Item('ocelot_spawn_egg')
PANDA_SPAWN_EGG = Item('panda_spawn_egg')
PARROT_SPAWN_EGG = Item('parrot_spawn_egg')
PHANTOM_SPAWN_EGG = Item('phantom_spawn_egg')
PIG_SPAWN_EGG = Item('pig_spawn_egg')
PIGLIN_SPAWN_EGG = Item('piglin_spawn_egg')
PIGLIN_BRUTE_SPAWN_EGG = Item('piglin_brute_spawn_egg')
PILLAGER_SPAWN_EGG = Item('pillager_spawn_egg')
POLAR_BEAR_SPAWN_EGG = Item('polar_bear_spawn_egg')
PUFFERFISH_SPAWN_EGG = Item('pufferfish_spawn_egg')
RABBIT_SPAWN_EGG = Item('rabbit_spawn_egg')
RAVAGER_SPAWN_EGG = Item('ravager_spawn_egg')
SALMON_SPAWN_EGG = Item('salmon_spawn_egg')
SHEEP_SPAWN_EGG = Item('sheep_spawn_egg')
SHULKER_SPAWN_EGG = Item('shulker_spawn_egg')
SILVERFISH_SPAWN_EGG = Item('silverfish_spawn_egg')
SKELETON_SPAWN_EGG = Item('skeleton_spawn_egg')
SKELETON_HORSE_SPAWN_EGG = Item('skeleton_horse_spawn_egg')
SLIME_SPAWN_EGG = Item('slime_spawn_egg')
SPIDER_SPAWN_EGG = Item('spider_spawn_egg')
SQUID_SPAWN_EGG = Item('squid_spawn_egg')
STRAY_SPAWN_EGG = Item('stray_spawn_egg')
STRIDER_SPAWN_EGG = Item('strider_spawn_egg')
TRADER_LLAMA_SPAWN_EGG = Item('trader_llama_spawn_egg')
TROPICAL_FISH_SPAWN_EGG = Item('tropical_fish_spawn_egg')
TURTLE_SPAWN_EGG = Item('turtle_spawn_egg')
VEX_SPAWN_EGG = Item('vex_spawn_egg')
VILLAGER_SPAWN_EGG = Item('villager_spawn_egg')
VINDICATOR_SPAWN_EGG = Item('vindicator_spawn_egg')
WANDERING_TRADER_SPAWN_EGG = Item('wandering_trader_spawn_egg')
WITCH_SPAWN_EGG = Item('witch_spawn_egg')
WITHER_SKELETON_SPAWN_EGG = Item('wither_skeleton_spawn_egg')
WOLF_SPAWN_EGG = Item('wolf_spawn_egg')
ZOGLIN_SPAWN_EGG = Item('zoglin_spawn_egg')
ZOMBIE_SPAWN_EGG = Item('zombie_spawn_egg')
ZOMBIE_HORSE_SPAWN_EGG = Item('zombie_horse_spawn_egg')
ZOMBIE_VILLAGER_SPAWN_EGG = Item('zombie_villager_spawn_egg')
ZOMBIFIED_PIGLIN_SPAWN_EGG = Item('zombified_piglin_spawn_egg')
EXPERIENCE_BOTTLE = Item('experience_bottle')
FIRE_CHARGE = Item('fire_charge')
WRITABLE_BOOK = Item('writable_book')
WRITTEN_BOOK = Item('written_book')
EMERALD = Item('emerald')
ITEM_FRAME = Item('item_frame')
FLOWER_POT = Item('flower_pot')
CARROT = Item('carrot')
POTATO = Item('potato')
BAKED_POTATO = Item('baked_potato')
POISONOUS_POTATO = Item('poisonous_potato')
MAP = Item('map')
GOLDEN_CARROT = Item('golden_carrot')
SKELETON_SKULL = Item('skeleton_skull')
WITHER_SKELETON_SKULL = Item('wither_skeleton_skull')
PLAYER_HEAD = Item('player_head')
ZOMBIE_HEAD = Item('zombie_head')
CREEPER_HEAD = Item('creeper_head')
DRAGON_HEAD = Item('dragon_head')
CARROT_ON_A_STICK = Item('carrot_on_a_stick')
WARPED_FUNGUS_ON_A_STICK = Item('warped_fungus_on_a_stick')
NETHER_STAR = Item('nether_star')
PUMPKIN_PIE = Item('pumpkin_pie')
FIREWORK_ROCKET = Item('firework_rocket')
FIREWORK_STAR = Item('firework_star')
ENCHANTED_BOOK = Item('enchanted_book')
NETHER_BRICK = Item('nether_brick')
QUARTZ = Item('quartz')
TNT_MINECART = Item('tnt_minecart')
HOPPER_MINECART = Item('hopper_minecart')
PRISMARINE_SHARD = Item('prismarine_shard')
PRISMARINE_CRYSTALS = Item('prismarine_crystals')
RABBIT = Item('rabbit')
COOKED_RABBIT = Item('cooked_rabbit')
RABBIT_STEW = Item('rabbit_stew')
RABBIT_FOOT = Item('rabbit_foot')
RABBIT_HIDE = Item('rabbit_hide')
ARMOR_STAND = Item('armor_stand')
IRON_HORSE_ARMOR = Item('iron_horse_armor')
GOLDEN_HORSE_ARMOR = Item('golden_horse_armor')
DIAMOND_HORSE_ARMOR = Item('diamond_horse_armor')
LEATHER_HORSE_ARMOR = Item('leather_horse_armor')
LEAD = Item('lead')
NAME_TAG = Item('name_tag')
COMMAND_BLOCK_MINECART = Item('command_block_minecart')
MUTTON = Item('mutton')
COOKED_MUTTON = Item('cooked_mutton')
WHITE_BANNER = Item('white_banner')
ORANGE_BANNER = Item('orange_banner')
MAGENTA_BANNER = Item('magenta_banner')
LIGHT_BLUE_BANNER = Item('light_blue_banner')
YELLOW_BANNER = Item('yellow_banner')
LIME_BANNER = Item('lime_banner')
PINK_BANNER = Item('pink_banner')
GRAY_BANNER = Item('gray_banner')
LIGHT_GRAY_BANNER = Item('light_gray_banner')
CYAN_BANNER = Item('cyan_banner')
PURPLE_BANNER = Item('purple_banner')
BLUE_BANNER = Item('blue_banner')
BROWN_BANNER = Item('brown_banner')
GREEN_BANNER = Item('green_banner')
RED_BANNER = Item('red_banner')
BLACK_BANNER = Item('black_banner')
END_CRYSTAL = Item('end_crystal')
CHORUS_FRUIT = Item('chorus_fruit')
POPPED_CHORUS_FRUIT = Item('popped_chorus_fruit')
BEETROOT = Item('beetroot')
BEETROOT_SEEDS = Item('beetroot_seeds')
BEETROOT_SOUP = Item('beetroot_soup')
DRAGON_BREATH = Item('dragon_breath')
SPLASH_POTION = Item('splash_potion')
SPECTRAL_ARROW = Item('spectral_arrow')
TIPPED_ARROW = Item('tipped_arrow')
LINGERING_POTION = Item('lingering_potion')
SHIELD = Item('shield')
ELYTRA = Item('elytra')
SPRUCE_BOAT = Item('spruce_boat')
BIRCH_BOAT = Item('birch_boat')
JUNGLE_BOAT = Item('jungle_boat')
ACACIA_BOAT = Item('acacia_boat')
DARK_OAK_BOAT = Item('dark_oak_boat')
TOTEM_OF_UNDYING = Item('totem_of_undying')
SHULKER_SHELL = Item('shulker_shell')
IRON_NUGGET = Item('iron_nugget')
KNOWLEDGE_BOOK = Item('knowledge_book')
DEBUG_STICK = Item('debug_stick')
MUSIC_DISC_13 = Item('music_disc_13')
MUSIC_DISC_CAT = Item('music_disc_cat')
MUSIC_DISC_BLOCKS = Item('music_disc_blocks')
MUSIC_DISC_CHIRP = Item('music_disc_chirp')
MUSIC_DISC_FAR = Item('music_disc_far')
MUSIC_DISC_MALL = Item('music_disc_mall')
MUSIC_DISC_MELLOHI = Item('music_disc_mellohi')
MUSIC_DISC_STAL = Item('music_disc_stal')
MUSIC_DISC_STRAD = Item('music_disc_strad')
MUSIC_DISC_WARD = Item('music_disc_ward')
MUSIC_DISC_11 = Item('music_disc_11')
MUSIC_DISC_WAIT = Item('music_disc_wait')
MUSIC_DISC_PIGSTEP = Item('music_disc_pigstep')
TRIDENT = Item('trident')
PHANTOM_MEMBRANE = Item('phantom_membrane')
NAUTILUS_SHELL = Item('nautilus_shell')
HEART_OF_THE_SEA = Item('heart_of_the_sea')
CROSSBOW = Item('crossbow')
SUSPICIOUS_STEW = Item('suspicious_stew')
LOOM = Item('loom')
FLOWER_BANNER_PATTERN = Item('flower_banner_pattern')
CREEPER_BANNER_PATTERN = Item('creeper_banner_pattern')
SKULL_BANNER_PATTERN = Item('skull_banner_pattern')
MOJANG_BANNER_PATTERN = Item('mojang_banner_pattern')
GLOBE_BANNER_PATTERN = Item('globe_banner_pattern')
PIGLIN_BANNER_PATTERN = Item('piglin_banner_pattern')
COMPOSTER = Item('composter')
BARREL = Item('barrel')
SMOKER = Item('smoker')
BLAST_FURNACE = Item('blast_furnace')
CARTOGRAPHY_TABLE = Item('cartography_table')
FLETCHING_TABLE = Item('fletching_table')
GRINDSTONE = Item('grindstone')
LECTERN = Item('lectern')
SMITHING_TABLE = Item('smithing_table')
STONECUTTER = Item('stonecutter')
BELL = Item('bell')
LANTERN = Item('lantern')
SOUL_LANTERN = Item('soul_lantern')
SWEET_BERRIES = Item('sweet_berries')
CAMPFIRE = Item('campfire')
SOUL_CAMPFIRE = Item('soul_campfire')
SHROOMLIGHT = Item('shroomlight')
HONEYCOMB = Item('honeycomb')
BEE_NEST = Item('bee_nest')
BEEHIVE = Item('beehive')
HONEY_BOTTLE = Item('honey_bottle')
HONEY_BLOCK = Item('honey_block')
HONEYCOMB_BLOCK = Item('honeycomb_block')
LODESTONE = Item('lodestone')
NETHERITE_BLOCK = Item('netherite_block')
ANCIENT_DEBRIS = Item('ancient_debris')
TARGET = Item('target')
CRYING_OBSIDIAN = Item('crying_obsidian')
BLACKSTONE = Item('blackstone')
BLACKSTONE_SLAB = Item('blackstone_slab')
BLACKSTONE_STAIRS = Item('blackstone_stairs')
GILDED_BLACKSTONE = Item('gilded_blackstone')
POLISHED_BLACKSTONE = Item('polished_blackstone')
POLISHED_BLACKSTONE_SLAB = Item('polished_blackstone_slab')
POLISHED_BLACKSTONE_STAIRS = Item('polished_blackstone_stairs')
CHISELED_POLISHED_BLACKSTONE = Item('chiseled_polished_blackstone')
POLISHED_BLACKSTONE_BRICKS = Item('polished_blackstone_bricks')
POLISHED_BLACKSTONE_BRICK_SLAB = Item('polished_blackstone_brick_slab')
POLISHED_BLACKSTONE_BRICK_STAIRS = Item('polished_blackstone_brick_stairs')
CRACKED_POLISHED_BLACKSTONE_BRICKS = Item('cracked_polished_blackstone_bricks')
RESPAWN_ANCHOR = Item('respawn_anchor')
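# Illustrative lookup helper (an addition, not in the original file): collect the
# Item constants defined above into an id -> Item table so items can be resolved
# from their string IDs, e.g. ITEM_BY_ID['respawn_anchor'] is RESPAWN_ANCHOR.
ITEM_BY_ID = {
    value.id: value
    for name, value in globals().items()
    if name.isupper() and isinstance(value, Item)
}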
|
MinecraftDataHelper/items.py
|
class Item:
    """A namespaced item identifier that renders as 'modid:item_id'."""

    def __init__(self, itemID, modid='minecraft'):
        super().__init__()
        self.modid = modid
        self.id = itemID

    def __str__(self) -> str:
        return f"{self.modid}:{self.id}"
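# Usage sketch (an illustrative addition, not part of the original module):
# an Item stringifies to the namespaced "modid:item_id" form used in commands;
# 'examplemod' below is a hypothetical namespace shown only to demonstrate the
# modid override.
assert str(Item('stone')) == 'minecraft:stone'
assert str(Item('ruby', modid='examplemod')) == 'examplemod:ruby'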
WHITE_CANDLE = Item('white_candle')
ORANGE_CANDLE = Item('orange_candle')
MAGENTA_CANDLE = Item('magenta_candle')
LIGHT_BLUE_CANDLE = Item('light_blue_candle')
YELLOW_CANDLE = Item('yellow_candle')
LIME_CANDLE = Item('lime_candle')
PINK_CANDLE = Item('pink_candle')
GRAY_CANDLE = Item('gray_candle')
LIGHT_GRAY_CANDLE = Item('light_gray_candle')
CYAN_CANDLE = Item('cyan_candle')
PURPLE_CANDLE = Item('purple_candle')
BLUE_CANDLE = Item('blue_candle')
BROWN_CANDLE = Item('brown_candle')
GREEN_CANDLE = Item('green_candle')
RED_CANDLE = Item('red_candle')
BLACK_CANDLE = Item('black_candle')
CANDLE = Item('candle')
DEEPSLATE_COAL_ORE = Item('deepslate_coal_ore')
COPPER_ORE = Item('copper_ore')
DEEPSLATE_COPPER_ORE = Item('deepslate_copper_ore')
DEEPSLATE_DIAMOND_ORE = Item('deepslate_diamond_ore')
DEEPSLATE_EMERALD_ORE = Item('deepslate_emerald_ore')
FLOWERING_AZALEA_LEAVES = Item('flowering_azalea_leaves')
FLOWERING_AZALEA = Item('flowering_azalea')
GLOW_BERRIES = Item('glow_berries')
DEEPSLATE_GOLD_ORE = Item('deepslate_gold_ore')
DEEPSLATE_IRON_ORE = Item('deepslate_iron_ore')
DEEPSLATE_LAPIS_ORE = Item('deepslate_lapis_ore')
DEEPSLATE_REDSTONE_ORE = Item('deepslate_redstone_ore')
COBBLED_DEEPSLATE = Item('cobbled_deepslate')
COBBLED_DEEPSLATE_WALL = Item('cobbled_deepslate_wall')
POLISHED_DEEPSLATE_WALL = Item('polished_deepslate_wall')
POLISHED_DEEPSLATE_STAIRS = Item('polished_deepslate_stairs')
DEEPSLATE_TILE_STAIRS = Item('deepslate_tile_stairs')
DEEPSLATE_BRICK_STAIRS = Item('deepslate_brick_stairs')
OXIDIZED_CUT_COPPER_STAIRS = Item('oxidized_cut_copper_stairs')
WEATHERED_CUT_COPPER_STAIRS = Item('weathered_cut_copper_stairs')
EXPOSED_CUT_COPPER_STAIRS = Item('exposed_cut_copper_stairs')
CUT_COPPER_STAIRS = Item('cut_copper_stairs')
WAXED_WEATHERED_CUT_COPPER_STAIRS = Item('waxed_weathered_cut_copper_stairs')
WAXED_EXPOSED_CUT_COPPER_STAIRS = Item('waxed_exposed_cut_copper_stairs')
WAXED_CUT_COPPER_STAIRS = Item('waxed_cut_copper_stairs')
WAXED_OXIDIZED_CUT_COPPER_STAIRS = Item('waxed_oxidized_cut_copper_stairs')
COBBLED_DEEPSLATE_SLAB = Item('cobbled_deepslate_slab')
POLISHED_DEEPSLATE_SLAB = Item('polished_deepslate_slab')
DEEPSLATE_TILE_SLAB = Item('deepslate_tile_slab')
DEEPSLATE_BRICK_SLAB = Item('deepslate_brick_slab')
WAXED_WEATHERED_CUT_COPPER_SLAB = Item('waxed_weathered_cut_copper_slab')
WAXED_EXPOSED_CUT_COPPER_SLAB = Item('waxed_exposed_cut_copper_slab')
WAXED_CUT_COPPER_SLAB = Item('waxed_cut_copper_slab')
OXIDIZED_CUT_COPPER_SLAB = Item('oxidized_cut_copper_slab')
WEATHERED_CUT_COPPER_SLAB = Item('weathered_cut_copper_slab')
EXPOSED_CUT_COPPER_SLAB = Item('exposed_cut_copper_slab')
CUT_COPPER_SLAB = Item('cut_copper_slab')
WAXED_OXIDIZED_CUT_COPPER_SLAB = Item('waxed_oxidized_cut_copper_slab')
COBBLED_DEEPSLATE_STAIRS = Item('cobbled_deepslate_stairs')
DEEPSLATE_TILE_WALL = Item('deepslate_tile_wall')
DEEPSLATE_BRICK_WALL = Item('deepslate_brick_wall')
CUT_SANDSTONE_SLAB = Item('cut_sandstone_slab')
AZALEA_LEAVES = Item('azalea_leaves')
RAW_GOLD = Item('raw_gold')
RAW_GOLD_BLOCK = Item('raw_gold_block')
AZALEA = Item('azalea')
AIR = Item('air')
STONE = Item('stone')
GRANITE = Item('granite')
POLISHED_GRANITE = Item('polished_granite')
DIORITE = Item('diorite')
POLISHED_DIORITE = Item('polished_diorite')
ANDESITE = Item('andesite')
POLISHED_ANDESITE = Item('polished_andesite')
GRASS_BLOCK = Item('grass_block')
DIRT = Item('dirt')
COARSE_DIRT = Item('coarse_dirt')
PODZOL = Item('podzol')
CRIMSON_NYLIUM = Item('crimson_nylium')
WARPED_NYLIUM = Item('warped_nylium')
COBBLESTONE = Item('cobblestone')
OAK_PLANKS = Item('oak_planks')
SPRUCE_PLANKS = Item('spruce_planks')
BIRCH_PLANKS = Item('birch_planks')
JUNGLE_PLANKS = Item('jungle_planks')
ACACIA_PLANKS = Item('acacia_planks')
DARK_OAK_PLANKS = Item('dark_oak_planks')
CRIMSON_PLANKS = Item('crimson_planks')
WARPED_PLANKS = Item('warped_planks')
OAK_SAPLING = Item('oak_sapling')
SPRUCE_SAPLING = Item('spruce_sapling')
BIRCH_SAPLING = Item('birch_sapling')
JUNGLE_SAPLING = Item('jungle_sapling')
ACACIA_SAPLING = Item('acacia_sapling')
DARK_OAK_SAPLING = Item('dark_oak_sapling')
BEDROCK = Item('bedrock')
SAND = Item('sand')
RED_SAND = Item('red_sand')
GRAVEL = Item('gravel')
GOLD_ORE = Item('gold_ore')
IRON_ORE = Item('iron_ore')
COAL_ORE = Item('coal_ore')
NETHER_GOLD_ORE = Item('nether_gold_ore')
OAK_LOG = Item('oak_log')
SPRUCE_LOG = Item('spruce_log')
BIRCH_LOG = Item('birch_log')
JUNGLE_LOG = Item('jungle_log')
ACACIA_LOG = Item('acacia_log')
DARK_OAK_LOG = Item('dark_oak_log')
CRIMSON_STEM = Item('crimson_stem')
WARPED_STEM = Item('warped_stem')
STRIPPED_OAK_LOG = Item('stripped_oak_log')
STRIPPED_SPRUCE_LOG = Item('stripped_spruce_log')
STRIPPED_BIRCH_LOG = Item('stripped_birch_log')
STRIPPED_JUNGLE_LOG = Item('stripped_jungle_log')
STRIPPED_ACACIA_LOG = Item('stripped_acacia_log')
STRIPPED_DARK_OAK_LOG = Item('stripped_dark_oak_log')
STRIPPED_CRIMSON_STEM = Item('stripped_crimson_stem')
STRIPPED_WARPED_STEM = Item('stripped_warped_stem')
STRIPPED_OAK_WOOD = Item('stripped_oak_wood')
STRIPPED_SPRUCE_WOOD = Item('stripped_spruce_wood')
STRIPPED_BIRCH_WOOD = Item('stripped_birch_wood')
STRIPPED_JUNGLE_WOOD = Item('stripped_jungle_wood')
STRIPPED_ACACIA_WOOD = Item('stripped_acacia_wood')
STRIPPED_DARK_OAK_WOOD = Item('stripped_dark_oak_wood')
STRIPPED_CRIMSON_HYPHAE = Item('stripped_crimson_hyphae')
STRIPPED_WARPED_HYPHAE = Item('stripped_warped_hyphae')
OAK_WOOD = Item('oak_wood')
SPRUCE_WOOD = Item('spruce_wood')
BIRCH_WOOD = Item('birch_wood')
JUNGLE_WOOD = Item('jungle_wood')
ACACIA_WOOD = Item('acacia_wood')
DARK_OAK_WOOD = Item('dark_oak_wood')
CRIMSON_HYPHAE = Item('crimson_hyphae')
WARPED_HYPHAE = Item('warped_hyphae')
OAK_LEAVES = Item('oak_leaves')
SPRUCE_LEAVES = Item('spruce_leaves')
BIRCH_LEAVES = Item('birch_leaves')
JUNGLE_LEAVES = Item('jungle_leaves')
ACACIA_LEAVES = Item('acacia_leaves')
DARK_OAK_LEAVES = Item('dark_oak_leaves')
SPONGE = Item('sponge')
WET_SPONGE = Item('wet_sponge')
GLASS = Item('glass')
LAPIS_ORE = Item('lapis_ore')
LAPIS_BLOCK = Item('lapis_block')
DISPENSER = Item('dispenser')
SANDSTONE = Item('sandstone')
CHISELED_SANDSTONE = Item('chiseled_sandstone')
CUT_SANDSTONE = Item('cut_sandstone')
NOTE_BLOCK = Item('note_block')
POWERED_RAIL = Item('powered_rail')
DETECTOR_RAIL = Item('detector_rail')
STICKY_PISTON = Item('sticky_piston')
COBWEB = Item('cobweb')
GRASS = Item('grass')
FERN = Item('fern')
DEAD_BUSH = Item('dead_bush')
SEAGRASS = Item('seagrass')
SEA_PICKLE = Item('sea_pickle')
PISTON = Item('piston')
WHITE_WOOL = Item('white_wool')
ORANGE_WOOL = Item('orange_wool')
MAGENTA_WOOL = Item('magenta_wool')
LIGHT_BLUE_WOOL = Item('light_blue_wool')
YELLOW_WOOL = Item('yellow_wool')
LIME_WOOL = Item('lime_wool')
PINK_WOOL = Item('pink_wool')
GRAY_WOOL = Item('gray_wool')
LIGHT_GRAY_WOOL = Item('light_gray_wool')
CYAN_WOOL = Item('cyan_wool')
PURPLE_WOOL = Item('purple_wool')
BLUE_WOOL = Item('blue_wool')
BROWN_WOOL = Item('brown_wool')
GREEN_WOOL = Item('green_wool')
RED_WOOL = Item('red_wool')
BLACK_WOOL = Item('black_wool')
DANDELION = Item('dandelion')
POPPY = Item('poppy')
BLUE_ORCHID = Item('blue_orchid')
ALLIUM = Item('allium')
AZURE_BLUET = Item('azure_bluet')
RED_TULIP = Item('red_tulip')
ORANGE_TULIP = Item('orange_tulip')
WHITE_TULIP = Item('white_tulip')
PINK_TULIP = Item('pink_tulip')
OXEYE_DAISY = Item('oxeye_daisy')
CORNFLOWER = Item('cornflower')
LILY_OF_THE_VALLEY = Item('lily_of_the_valley')
WITHER_ROSE = Item('wither_rose')
BROWN_MUSHROOM = Item('brown_mushroom')
RED_MUSHROOM = Item('red_mushroom')
CRIMSON_FUNGUS = Item('crimson_fungus')
WARPED_FUNGUS = Item('warped_fungus')
CRIMSON_ROOTS = Item('crimson_roots')
WARPED_ROOTS = Item('warped_roots')
NETHER_SPROUTS = Item('nether_sprouts')
WEEPING_VINES = Item('weeping_vines')
TWISTING_VINES = Item('twisting_vines')
SUGAR_CANE = Item('sugar_cane')
KELP = Item('kelp')
BAMBOO = Item('bamboo')
GOLD_BLOCK = Item('gold_block')
IRON_BLOCK = Item('iron_block')
OAK_SLAB = Item('oak_slab')
SPRUCE_SLAB = Item('spruce_slab')
BIRCH_SLAB = Item('birch_slab')
JUNGLE_SLAB = Item('jungle_slab')
ACACIA_SLAB = Item('acacia_slab')
DARK_OAK_SLAB = Item('dark_oak_slab')
CRIMSON_SLAB = Item('crimson_slab')
WARPED_SLAB = Item('warped_slab')
STONE_SLAB = Item('stone_slab')
SMOOTH_STONE_SLAB = Item('smooth_stone_slab')
SANDSTONE_SLAB = Item('sandstone_slab')
CUT_STANDSTONE_SLAB = Item('cut_sandstone_slab')
PETRIFIED_OAK_SLAB = Item('petrified_oak_slab')
COBBLESTONE_SLAB = Item('cobblestone_slab')
BRICK_SLAB = Item('brick_slab')
STONE_BRICK_SLAB = Item('stone_brick_slab')
NETHER_BRICK_SLAB = Item('nether_brick_slab')
QUARTZ_SLAB = Item('quartz_slab')
RED_SANDSTONE_SLAB = Item('red_sandstone_slab')
CUT_RED_SANDSTONE_SLAB = Item('cut_red_sandstone_slab')
PURPUR_SLAB = Item('purpur_slab')
PRISMARINE_SLAB = Item('prismarine_slab')
PRISMARINE_BRICK_SLAB = Item('prismarine_brick_slab')
DARK_PRISMARINE_SLAB = Item('dark_prismarine_slab')
SMOOTH_QUARTZ = Item('smooth_quartz')
SMOOTH_RED_SANDSTONE = Item('smooth_red_sandstone')
SMOOTH_SANDSTONE = Item('smooth_sandstone')
SMOOTH_STONE = Item('smooth_stone')
BRICKS = Item('bricks')
TNT = Item('tnt')
BOOKSHELF = Item('bookshelf')
MOSSY_COBBLESTONE = Item('mossy_cobblestone')
OBSIDIAN = Item('obsidian')
TORCH = Item('torch')
END_ROD = Item('end_rod')
CHORUS_PLANT = Item('chorus_plant')
CHORUS_FLOWER = Item('chorus_flower')
PURPUR_BLOCK = Item('purpur_block')
PURPUR_PILLAR = Item('purpur_pillar')
PURPUR_STAIRS = Item('purpur_stairs')
SPAWNER = Item('spawner')
OAK_STAIRS = Item('oak_stairs')
CHEST = Item('chest')
DIAMOND_ORE = Item('diamond_ore')
DIAMOND_BLOCK = Item('diamond_block')
CRAFTING_TABLE = Item('crafting_table')
FARMLAND = Item('farmland')
FURNACE = Item('furnace')
LADDER = Item('ladder')
RAIL = Item('rail')
COBBLESTONE_STAIRS = Item('cobblestone_stairs')
LEVER = Item('lever')
STONE_PRESSURE_PLATE = Item('stone_pressure_plate')
OAK_PRESSURE_PLATE = Item('oak_pressure_plate')
SPRUCE_PRESSURE_PLATE = Item('spruce_pressure_plate')
BIRCH_PRESSURE_PLATE = Item('birch_pressure_plate')
JUNGLE_PRESSURE_PLATE = Item('jungle_pressure_plate')
ACACIA_PRESSURE_PLATE = Item('acacia_pressure_plate')
DARK_OAK_PRESSURE_PLATE = Item('dark_oak_pressure_plate')
CRIMSON_PRESSURE_PLATE = Item('crimson_pressure_plate')
WARPED_PRESSURE_PLATE = Item('warped_pressure_plate')
POLISHED_BLACKSTONE_PRESSURE_PLATE = Item('polished_blackstone_pressure_plate')
REDSTONE_ORE = Item('redstone_ore')
REDSTONE_TORCH = Item('redstone_torch')
SNOW = Item('snow')
ICE = Item('ice')
SNOW_BLOCK = Item('snow_block')
CACTUS = Item('cactus')
CLAY = Item('clay')
JUKEBOX = Item('jukebox')
OAK_FENCE = Item('oak_fence')
SPRUCE_FENCE = Item('spruce_fence')
BIRCH_FENCE = Item('birch_fence')
JUNGLE_FENCE = Item('jungle_fence')
ACACIA_FENCE = Item('acacia_fence')
DARK_OAK_FENCE = Item('dark_oak_fence')
CRIMSON_FENCE = Item('crimson_fence')
WARPED_FENCE = Item('warped_fence')
PUMPKIN = Item('pumpkin')
CARVED_PUMPKIN = Item('carved_pumpkin')
NETHERRACK = Item('netherrack')
SOUL_SAND = Item('soul_sand')
SOUL_SOIL = Item('soul_soil')
BASALT = Item('basalt')
POLISHED_BASALT = Item('polished_basalt')
SOUL_TORCH = Item('soul_torch')
GLOWSTONE = Item('glowstone')
JACK_O_LANTERN = Item('jack_o_lantern')
OAK_TRAPDOOR = Item('oak_trapdoor')
SPRUCE_TRAPDOOR = Item('spruce_trapdoor')
BIRCH_TRAPDOOR = Item('birch_trapdoor')
JUNGLE_TRAPDOOR = Item('jungle_trapdoor')
ACACIA_TRAPDOOR = Item('acacia_trapdoor')
DARK_OAK_TRAPDOOR = Item('dark_oak_trapdoor')
CRIMSON_TRAPDOOR = Item('crimson_trapdoor')
WARPED_TRAPDOOR = Item('warped_trapdoor')
INFESTED_STONE = Item('infested_stone')
INFESTED_COBBLESTONE = Item('infested_cobblestone')
INFESTED_STONE_BRICKS = Item('infested_stone_bricks')
INFESTED_MOSSY_STONE_BRICKS = Item('infested_mossy_stone_bricks')
INFESTED_CRACKED_STONE_BRICKS = Item('infested_cracked_stone_bricks')
INFESTED_CHISELED_STONE_BRICKS = Item('infested_chiseled_stone_bricks')
STONE_BRICKS = Item('stone_bricks')
MOSSY_STONE_BRICKS = Item('mossy_stone_bricks')
CRACKED_STONE_BRICKS = Item('cracked_stone_bricks')
CHISELED_STONE_BRICKS = Item('chiseled_stone_bricks')
BROWN_MUSHROOM_BLOCK = Item('brown_mushroom_block')
RED_MUSHROOM_BLOCK = Item('red_mushroom_block')
MUSHROOM_STEM = Item('mushroom_stem')
IRON_BARS = Item('iron_bars')
CHAIN = Item('chain')
GLASS_PANE = Item('glass_pane')
MELON = Item('melon')
VINE = Item('vine')
OAK_FENCE_GATE = Item('oak_fence_gate')
SPRUCE_FENCE_GATE = Item('spruce_fence_gate')
BIRCH_FENCE_GATE = Item('birch_fence_gate')
JUNGLE_FENCE_GATE = Item('jungle_fence_gate')
ACACIA_FENCE_GATE = Item('acacia_fence_gate')
DARK_OAK_FENCE_GATE = Item('dark_oak_fence_gate')
CRIMSON_FENCE_GATE = Item('crimson_fence_gate')
WARPED_FENCE_GATE = Item('warped_fence_gate')
BRICK_STAIRS = Item('brick_stairs')
STONE_BRICK_STAIRS = Item('stone_brick_stairs')
MYCELIUM = Item('mycelium')
LILY_PAD = Item('lily_pad')
NETHER_BRICKS = Item('nether_bricks')
CRACKED_NETHER_BRICKS = Item('cracked_nether_bricks')
CHISELED_NETHER_BRICKS = Item('chiseled_nether_bricks')
NETHER_BRICK_FENCE = Item('nether_brick_fence')
NETHER_BRICK_STAIRS = Item('nether_brick_stairs')
ENCHANTING_TABLE = Item('enchanting_table')
END_PORTAL_FRAME = Item('end_portal_frame')
END_STONE = Item('end_stone')
END_STONE_BRICKS = Item('end_stone_bricks')
DRAGON_EGG = Item('dragon_egg')
REDSTONE_LAMP = Item('redstone_lamp')
SANDSTONE_STAIRS = Item('sandstone_stairs')
EMERALD_ORE = Item('emerald_ore')
ENDER_CHEST = Item('ender_chest')
TRIPWIRE_HOOK = Item('tripwire_hook')
EMERALD_BLOCK = Item('emerald_block')
SPRUCE_STAIRS = Item('spruce_stairs')
BIRCH_STAIRS = Item('birch_stairs')
JUNGLE_STAIRS = Item('jungle_stairs')
CRIMSON_STAIRS = Item('crimson_stairs')
WARPED_STAIRS = Item('warped_stairs')
COMMAND_BLOCK = Item('command_block')
BEACON = Item('beacon')
COBBLESTONE_WALL = Item('cobblestone_wall')
MOSSY_COBBLESTONE_WALL = Item('mossy_cobblestone_wall')
BRICK_WALL = Item('brick_wall')
PRISMARINE_WALL = Item('prismarine_wall')
RED_SANDSTONE_WALL = Item('red_sandstone_wall')
MOSSY_STONE_BRICK_WALL = Item('mossy_stone_brick_wall')
GRANITE_WALL = Item('granite_wall')
STONE_BRICK_WALL = Item('stone_brick_wall')
NETHER_BRICK_WALL = Item('nether_brick_wall')
ANDESITE_WALL = Item('andesite_wall')
RED_NETHER_BRICK_WALL = Item('red_nether_brick_wall')
SANDSTONE_WALL = Item('sandstone_wall')
END_STONE_BRICK_WALL = Item('end_stone_brick_wall')
DIORITE_WALL = Item('diorite_wall')
BLACKSTONE_WALL = Item('blackstone_wall')
POLISHED_BLACKSTONE_WALL = Item('polished_blackstone_wall')
POLISHED_BLACKSTONE_BRICK_WALL = Item('polished_blackstone_brick_wall')
STONE_BUTTON = Item('stone_button')
OAK_BUTTON = Item('oak_button')
SPRUCE_BUTTON = Item('spruce_button')
BIRCH_BUTTON = Item('birch_button')
JUNGLE_BUTTON = Item('jungle_button')
ACACIA_BUTTON = Item('acacia_button')
DARK_OAK_BUTTON = Item('dark_oak_button')
CRIMSON_BUTTON = Item('crimson_button')
WARPED_BUTTON = Item('warped_button')
POLISHED_BLACKSTONE_BUTTON = Item('polished_blackstone_button')
ANVIL = Item('anvil')
CHIPPED_ANVIL = Item('chipped_anvil')
DAMAGED_ANVIL = Item('damaged_anvil')
TRAPPED_CHEST = Item('trapped_chest')
LIGHT_WEIGHTED_PRESSURE_PLATE = Item('light_weighted_pressure_plate')
HEAVY_WEIGHTED_PRESSURE_PLATE = Item('heavy_weighted_pressure_plate')
DAYLIGHT_DETECTOR = Item('daylight_detector')
REDSTONE_BLOCK = Item('redstone_block')
NETHER_QUARTZ_ORE = Item('nether_quartz_ore')
HOPPER = Item('hopper')
CHISELED_QUARTZ_BLOCK = Item('chiseled_quartz_block')
QUARTZ_BLOCK = Item('quartz_block')
QUARTZ_BRICKS = Item('quartz_bricks')
QUARTZ_PILLAR = Item('quartz_pillar')
QUARTZ_STAIRS = Item('quartz_stairs')
ACTIVATOR_RAIL = Item('activator_rail')
DROPPER = Item('dropper')
WHITE_TERRACOTTA = Item('white_terracotta')
ORANGE_TERRACOTTA = Item('orange_terracotta')
MAGENTA_TERRACOTTA = Item('magenta_terracotta')
LIGHT_BLUE_TERRACOTTA = Item('light_blue_terracotta')
YELLOW_TERRACOTTA = Item('yellow_terracotta')
LIME_TERRACOTTA = Item('lime_terracotta')
PINK_TERRACOTTA = Item('pink_terracotta')
GRAY_TERRACOTTA = Item('gray_terracotta')
LIGHT_GRAY_TERRACOTTA = Item('light_gray_terracotta')
CYAN_TERRACOTTA = Item('cyan_terracotta')
PURPLE_TERRACOTTA = Item('purple_terracotta')
BLUE_TERRACOTTA = Item('blue_terracotta')
BROWN_TERRACOTTA = Item('brown_terracotta')
GREEN_TERRACOTTA = Item('green_terracotta')
RED_TERRACOTTA = Item('red_terracotta')
BLACK_TERRACOTTA = Item('black_terracotta')
BARRIER = Item('barrier')
IRON_TRAPDOOR = Item('iron_trapdoor')
HAY_BLOCK = Item('hay_block')
WHITE_CARPET = Item('white_carpet')
ORANGE_CARPET = Item('orange_carpet')
MAGENTA_CARPET = Item('magenta_carpet')
LIGHT_BLUE_CARPET = Item('light_blue_carpet')
YELLOW_CARPET = Item('yellow_carpet')
LIME_CARPET = Item('lime_carpet')
PINK_CARPET = Item('pink_carpet')
GRAY_CARPET = Item('gray_carpet')
LIGHT_GRAY_CARPET = Item('light_gray_carpet')
CYAN_CARPET = Item('cyan_carpet')
PURPLE_CARPET = Item('purple_carpet')
BLUE_CARPET = Item('blue_carpet')
BROWN_CARPET = Item('brown_carpet')
GREEN_CARPET = Item('green_carpet')
RED_CARPET = Item('red_carpet')
BLACK_CARPET = Item('black_carpet')
TERRACOTTA = Item('terracotta')
COAL_BLOCK = Item('coal_block')
PACKED_ICE = Item('packed_ice')
ACACIA_STAIRS = Item('acacia_stairs')
DARK_OAK_STAIRS = Item('dark_oak_stairs')
SLIME_BLOCK = Item('slime_block')
GRASS_PATH = Item('grass_path')
SUNFLOWER = Item('sunflower')
LILAC = Item('lilac')
ROSE_BUSH = Item('rose_bush')
PEONY = Item('peony')
TALL_GRASS = Item('tall_grass')
LARGE_FERN = Item('large_fern')
WHITE_STAINED_GLASS = Item('white_stained_glass')
ORANGE_STAINED_GLASS = Item('orange_stained_glass')
MAGENTA_STAINED_GLASS = Item('magenta_stained_glass')
LIGHT_BLUE_STAINED_GLASS = Item('light_blue_stained_glass')
YELLOW_STAINED_GLASS = Item('yellow_stained_glass')
LIME_STAINED_GLASS = Item('lime_stained_glass')
PINK_STAINED_GLASS = Item('pink_stained_glass')
GRAY_STAINED_GLASS = Item('gray_stained_glass')
LIGHT_GRAY_STAINED_GLASS = Item('light_gray_stained_glass')
CYAN_STAINED_GLASS = Item('cyan_stained_glass')
PURPLE_STAINED_GLASS = Item('purple_stained_glass')
BLUE_STAINED_GLASS = Item('blue_stained_glass')
BROWN_STAINED_GLASS = Item('brown_stained_glass')
GREEN_STAINED_GLASS = Item('green_stained_glass')
RED_STAINED_GLASS = Item('red_stained_glass')
BLACK_STAINED_GLASS = Item('black_stained_glass')
WHITE_STAINED_GLASS_PANE = Item('white_stained_glass_pane')
ORANGE_STAINED_GLASS_PANE = Item('orange_stained_glass_pane')
MAGENTA_STAINED_GLASS_PANE = Item('magenta_stained_glass_pane')
LIGHT_BLUE_STAINED_GLASS_PANE = Item('light_blue_stained_glass_pane')
YELLOW_STAINED_GLASS_PANE = Item('yellow_stained_glass_pane')
LIME_STAINED_GLASS_PANE = Item('lime_stained_glass_pane')
PINK_STAINED_GLASS_PANE = Item('pink_stained_glass_pane')
GRAY_STAINED_GLASS_PANE = Item('gray_stained_glass_pane')
LIGHT_GRAY_STAINED_GLASS_PANE = Item('light_gray_stained_glass_pane')
CYAN_STAINED_GLASS_PANE = Item('cyan_stained_glass_pane')
PURPLE_STAINED_GLASS_PANE = Item('purple_stained_glass_pane')
BLUE_STAINED_GLASS_PANE = Item('blue_stained_glass_pane')
BROWN_STAINED_GLASS_PANE = Item('brown_stained_glass_pane')
GREEN_STAINED_GLASS_PANE = Item('green_stained_glass_pane')
RED_STAINED_GLASS_PANE = Item('red_stained_glass_pane')
BLACK_STAINED_GLASS_PANE = Item('black_stained_glass_pane')
PRISMARINE = Item('prismarine')
PRISMARINE_BRICKS = Item('prismarine_bricks')
DARK_PRISMARINE = Item('dark_prismarine')
PRISMARINE_STAIRS = Item('prismarine_stairs')
PRISMARINE_BRICK_STAIRS = Item('prismarine_brick_stairs')
DARK_PRISMARINE_STAIRS = Item('dark_prismarine_stairs')
SEA_LANTERN = Item('sea_lantern')
RED_SANDSTONE = Item('red_sandstone')
CHISELED_RED_SANDSTONE = Item('chiseled_red_sandstone')
CUT_RED_SANDSTONE = Item('cut_red_sandstone')
RED_SANDSTONE_STAIRS = Item('red_sandstone_stairs')
REPEATING_COMMAND_BLOCK = Item('repeating_command_block')
CHAIN_COMMAND_BLOCK = Item('chain_command_block')
MAGMA_BLOCK = Item('magma_block')
NETHER_WART_BLOCK = Item('nether_wart_block')
WARPED_WART_BLOCK = Item('warped_wart_block')
RED_NETHER_BRICKS = Item('red_nether_bricks')
BONE_BLOCK = Item('bone_block')
STRUCTURE_VOID = Item('structure_void')
OBSERVER = Item('observer')
SHULKER_BOX = Item('shulker_box')
WHITE_SHULKER_BOX = Item('white_shulker_box')
ORANGE_SHULKER_BOX = Item('orange_shulker_box')
MAGENTA_SHULKER_BOX = Item('magenta_shulker_box')
LIGHT_BLUE_SHULKER_BOX = Item('light_blue_shulker_box')
YELLOW_SHULKER_BOX = Item('yellow_shulker_box')
LIME_SHULKER_BOX = Item('lime_shulker_box')
PINK_SHULKER_BOX = Item('pink_shulker_box')
GRAY_SHULKER_BOX = Item('gray_shulker_box')
LIGHT_GRAY_SHULKER_BOX = Item('light_gray_shulker_box')
CYAN_SHULKER_BOX = Item('cyan_shulker_box')
PURPLE_SHULKER_BOX = Item('purple_shulker_box')
BLUE_SHULKER_BOX = Item('blue_shulker_box')
BROWN_SHULKER_BOX = Item('brown_shulker_box')
GREEN_SHULKER_BOX = Item('green_shulker_box')
RED_SHULKER_BOX = Item('red_shulker_box')
BLACK_SHULKER_BOX = Item('black_shulker_box')
WHITE_GLAZED_TERRACOTTA = Item('white_glazed_terracotta')
ORANGE_GLAZED_TERRACOTTA = Item('orange_glazed_terracotta')
MAGENTA_GLAZED_TERRACOTTA = Item('magenta_glazed_terracotta')
LIGHT_BLUE_GLAZED_TERRACOTTA = Item('light_blue_glazed_terracotta')
YELLOW_GLAZED_TERRACOTTA = Item('yellow_glazed_terracotta')
LIME_GLAZED_TERRACOTTA = Item('lime_glazed_terracotta')
PINK_GLAZED_TERRACOTTA = Item('pink_glazed_terracotta')
GRAY_GLAZED_TERRACOTTA = Item('gray_glazed_terracotta')
LIGHT_GRAY_GLAZED_TERRACOTTA = Item('light_gray_glazed_terracotta')
CYAN_GLAZED_TERRACOTTA = Item('cyan_glazed_terracotta')
PURPLE_GLAZED_TERRACOTTA = Item('purple_glazed_terracotta')
BLUE_GLAZED_TERRACOTTA = Item('blue_glazed_terracotta')
BROWN_GLAZED_TERRACOTTA = Item('brown_glazed_terracotta')
GREEN_GLAZED_TERRACOTTA = Item('green_glazed_terracotta')
RED_GLAZED_TERRACOTTA = Item('red_glazed_terracotta')
BLACK_GLAZED_TERRACOTTA = Item('black_glazed_terracotta')
WHITE_CONCRETE = Item('white_concrete')
ORANGE_CONCRETE = Item('orange_concrete')
MAGENTA_CONCRETE = Item('magenta_concrete')
LIGHT_BLUE_CONCRETE = Item('light_blue_concrete')
YELLOW_CONCRETE = Item('yellow_concrete')
LIME_CONCRETE = Item('lime_concrete')
PINK_CONCRETE = Item('pink_concrete')
GRAY_CONCRETE = Item('gray_concrete')
LIGHT_GRAY_CONCRETE = Item('light_gray_concrete')
CYAN_CONCRETE = Item('cyan_concrete')
PURPLE_CONCRETE = Item('purple_concrete')
BLUE_CONCRETE = Item('blue_concrete')
BROWN_CONCRETE = Item('brown_concrete')
GREEN_CONCRETE = Item('green_concrete')
RED_CONCRETE = Item('red_concrete')
BLACK_CONCRETE = Item('black_concrete')
WHITE_CONCRETE_POWDER = Item('white_concrete_powder')
ORANGE_CONCRETE_POWDER = Item('orange_concrete_powder')
MAGENTA_CONCRETE_POWDER = Item('magenta_concrete_powder')
LIGHT_BLUE_CONCRETE_POWDER = Item('light_blue_concrete_powder')
YELLOW_CONCRETE_POWDER = Item('yellow_concrete_powder')
LIME_CONCRETE_POWDER = Item('lime_concrete_powder')
PINK_CONCRETE_POWDER = Item('pink_concrete_powder')
GRAY_CONCRETE_POWDER = Item('gray_concrete_powder')
LIGHT_GRAY_CONCRETE_POWDER = Item('light_gray_concrete_powder')
CYAN_CONCRETE_POWDER = Item('cyan_concrete_powder')
PURPLE_CONCRETE_POWDER = Item('purple_concrete_powder')
BLUE_CONCRETE_POWDER = Item('blue_concrete_powder')
BROWN_CONCRETE_POWDER = Item('brown_concrete_powder')
GREEN_CONCRETE_POWDER = Item('green_concrete_powder')
RED_CONCRETE_POWDER = Item('red_concrete_powder')
BLACK_CONCRETE_POWDER = Item('black_concrete_powder')
TURTLE_EGG = Item('turtle_egg')
DEAD_TUBE_CORAL_BLOCK = Item('dead_tube_coral_block')
DEAD_BRAIN_CORAL_BLOCK = Item('dead_brain_coral_block')
DEAD_BUBBLE_CORAL_BLOCK = Item('dead_bubble_coral_block')
DEAD_FIRE_CORAL_BLOCK = Item('dead_fire_coral_block')
DEAD_HORN_CORAL_BLOCK = Item('dead_horn_coral_block')
TUBE_CORAL_BLOCK = Item('tube_coral_block')
BRAIN_CORAL_BLOCK = Item('brain_coral_block')
BUBBLE_CORAL_BLOCK = Item('bubble_coral_block')
FIRE_CORAL_BLOCK = Item('fire_coral_block')
HORN_CORAL_BLOCK = Item('horn_coral_block')
TUBE_CORAL = Item('tube_coral')
BRAIN_CORAL = Item('brain_coral')
BUBBLE_CORAL = Item('bubble_coral')
FIRE_CORAL = Item('fire_coral')
HORN_CORAL = Item('horn_coral')
DEAD_BRAIN_CORAL = Item('dead_brain_coral')
DEAD_BUBBLE_CORAL = Item('dead_bubble_coral')
DEAD_FIRE_CORAL = Item('dead_fire_coral')
DEAD_HORN_CORAL = Item('dead_horn_coral')
DEAD_TUBE_CORAL = Item('dead_tube_coral')
TUBE_CORAL_FAN = Item('tube_coral_fan')
BRAIN_CORAL_FAN = Item('brain_coral_fan')
BUBBLE_CORAL_FAN = Item('bubble_coral_fan')
FIRE_CORAL_FAN = Item('fire_coral_fan')
HORN_CORAL_FAN = Item('horn_coral_fan')
DEAD_TUBE_CORAL_FAN = Item('dead_tube_coral_fan')
DEAD_BRAIN_CORAL_FAN = Item('dead_brain_coral_fan')
DEAD_BUBBLE_CORAL_FAN = Item('dead_bubble_coral_fan')
DEAD_FIRE_CORAL_FAN = Item('dead_fire_coral_fan')
DEAD_HORN_CORAL_FAN = Item('dead_horn_coral_fan')
BLUE_ICE = Item('blue_ice')
CONDUIT = Item('conduit')
POLISHED_GRANITE_STAIRS = Item('polished_granite_stairs')
SMOOTH_RED_SANDSTONE_STAIRS = Item('smooth_red_sandstone_stairs')
MOSSY_STONE_BRICK_STAIRS = Item('mossy_stone_brick_stairs')
POLISHED_DIORITE_STAIRS = Item('polished_diorite_stairs')
MOSSY_COBBLESTONE_STAIRS = Item('mossy_cobblestone_stairs')
END_STONE_BRICK_STAIRS = Item('end_stone_brick_stairs')
STONE_STAIRS = Item('stone_stairs')
SMOOTH_SANDSTONE_STAIRS = Item('smooth_sandstone_stairs')
SMOOTH_QUARTZ_STAIRS = Item('smooth_quartz_stairs')
GRANITE_STAIRS = Item('granite_stairs')
ANDESITE_STAIRS = Item('andesite_stairs')
RED_NETHER_BRICK_STAIRS = Item('red_nether_brick_stairs')
POLISHED_ANDESITE_STAIRS = Item('polished_andesite_stairs')
DIORITE_STAIRS = Item('diorite_stairs')
POLISHED_GRANITE_SLAB = Item('polished_granite_slab')
SMOOTH_RED_SANDSTONE_SLAB = Item('smooth_red_sandstone_slab')
MOSSY_STONE_BRICK_SLAB = Item('mossy_stone_brick_slab')
POLISHED_DIORITE_SLAB = Item('polished_diorite_slab')
MOSSY_COBBLESTONE_SLAB = Item('mossy_cobblestone_slab')
END_STONE_BRICK_SLAB = Item('end_stone_brick_slab')
SMOOTH_SANDSTONE_SLAB = Item('smooth_sandstone_slab')
SMOOTH_QUARTZ_SLAB = Item('smooth_quartz_slab')
GRANITE_SLAB = Item('granite_slab')
ANDESITE_SLAB = Item('andesite_slab')
RED_NETHER_BRICK_SLAB = Item('red_nether_brick_slab')
POLISHED_ANDESITE_SLAB = Item('polished_andesite_slab')
DIORITE_SLAB = Item('diorite_slab')
SCAFFOLDING = Item('scaffolding')
IRON_DOOR = Item('iron_door')
OAK_DOOR = Item('oak_door')
SPRUCE_DOOR = Item('spruce_door')
BIRCH_DOOR = Item('birch_door')
JUNGLE_DOOR = Item('jungle_door')
ACACIA_DOOR = Item('acacia_door')
DARK_OAK_DOOR = Item('dark_oak_door')
CRIMSON_DOOR = Item('crimson_door')
WARPED_DOOR = Item('warped_door')
REPEATER = Item('repeater')
COMPARATOR = Item('comparator')
STRUCTURE_BLOCK = Item('structure_block')
JIGSAW = Item('jigsaw')
TURTLE_HELMET = Item('turtle_helmet')
SCUTE = Item('scute')
FLINT_AND_STEEL = Item('flint_and_steel')
APPLE = Item('apple')
BOW = Item('bow')
ARROW = Item('arrow')
COAL = Item('coal')
CHARCOAL = Item('charcoal')
DIAMOND = Item('diamond')
IRON_INGOT = Item('iron_ingot')
GOLD_INGOT = Item('gold_ingot')
NETHERITE_INGOT = Item('netherite_ingot')
NETHERITE_SCRAP = Item('netherite_scrap')
WOODEN_SWORD = Item('wooden_sword')
WOODEN_SHOVEL = Item('wooden_shovel')
WOODEN_PICKAXE = Item('wooden_pickaxe')
WOODEN_AXE = Item('wooden_axe')
WOODEN_HOE = Item('wooden_hoe')
STONE_SWORD = Item('stone_sword')
STONE_SHOVEL = Item('stone_shovel')
STONE_PICKAXE = Item('stone_pickaxe')
STONE_AXE = Item('stone_axe')
STONE_HOE = Item('stone_hoe')
GOLDEN_SWORD = Item('golden_sword')
GOLDEN_SHOVEL = Item('golden_shovel')
GOLDEN_PICKAXE = Item('golden_pickaxe')
GOLDEN_AXE = Item('golden_axe')
GOLDEN_HOE = Item('golden_hoe')
IRON_SWORD = Item('iron_sword')
IRON_SHOVEL = Item('iron_shovel')
IRON_PICKAXE = Item('iron_pickaxe')
IRON_AXE = Item('iron_axe')
IRON_HOE = Item('iron_hoe')
DIAMOND_SWORD = Item('diamond_sword')
DIAMOND_SHOVEL = Item('diamond_shovel')
DIAMOND_PICKAXE = Item('diamond_pickaxe')
DIAMOND_AXE = Item('diamond_axe')
DIAMOND_HOE = Item('diamond_hoe')
NETHERITE_SWORD = Item('netherite_sword')
NETHERITE_SHOVEL = Item('netherite_shovel')
NETHERITE_PICKAXE = Item('netherite_pickaxe')
NETHERITE_AXE = Item('netherite_axe')
NETHERITE_HOE = Item('netherite_hoe')
STICK = Item('stick')
BOWL = Item('bowl')
MUSHROOM_STEW = Item('mushroom_stew')
STRING = Item('string')
FEATHER = Item('feather')
GUNPOWDER = Item('gunpowder')
WHEAT_SEEDS = Item('wheat_seeds')
WHEAT = Item('wheat')
BREAD = Item('bread')
LEATHER_HELMET = Item('leather_helmet')
LEATHER_CHESTPLATE = Item('leather_chestplate')
LEATHER_LEGGINGS = Item('leather_leggings')
LEATHER_BOOTS = Item('leather_boots')
CHAINMAIL_HELMET = Item('chainmail_helmet')
CHAINMAIL_CHESTPLATE = Item('chainmail_chestplate')
CHAINMAIL_LEGGINGS = Item('chainmail_leggings')
CHAINMAIL_BOOTS = Item('chainmail_boots')
IRON_HELMET = Item('iron_helmet')
IRON_CHESTPLATE = Item('iron_chestplate')
IRON_LEGGINGS = Item('iron_leggings')
IRON_BOOTS = Item('iron_boots')
DIAMOND_HELMET = Item('diamond_helmet')
DIAMOND_CHESTPLATE = Item('diamond_chestplate')
DIAMOND_LEGGINGS = Item('diamond_leggings')
DIAMOND_BOOTS = Item('diamond_boots')
GOLDEN_HELMET = Item('golden_helmet')
GOLDEN_CHESTPLATE = Item('golden_chestplate')
GOLDEN_LEGGINGS = Item('golden_leggings')
GOLDEN_BOOTS = Item('golden_boots')
NETHERITE_HELMET = Item('netherite_helmet')
NETHERITE_CHESTPLATE = Item('netherite_chestplate')
NETHERITE_LEGGINGS = Item('netherite_leggings')
NETHERITE_BOOTS = Item('netherite_boots')
FLINT = Item('flint')
PORKCHOP = Item('porkchop')
COOKED_PORKCHOP = Item('cooked_porkchop')
PAINTING = Item('painting')
GOLDEN_APPLE = Item('golden_apple')
ENCHANTED_GOLDEN_APPLE = Item('enchanted_golden_apple')
OAK_SIGN = Item('oak_sign')
SPRUCE_SIGN = Item('spruce_sign')
BIRCH_SIGN = Item('birch_sign')
JUNGLE_SIGN = Item('jungle_sign')
ACACIA_SIGN = Item('acacia_sign')
DARK_OAK_SIGN = Item('dark_oak_sign')
CRIMSON_SIGN = Item('crimson_sign')
WARPED_SIGN = Item('warped_sign')
BUCKET = Item('bucket')
WATER_BUCKET = Item('water_bucket')
LAVA_BUCKET = Item('lava_bucket')
MINECART = Item('minecart')
SADDLE = Item('saddle')
REDSTONE = Item('redstone')
SNOWBALL = Item('snowball')
OAK_BOAT = Item('oak_boat')
LEATHER = Item('leather')
MILK_BUCKET = Item('milk_bucket')
PUFFERFISH_BUCKET = Item('pufferfish_bucket')
SALMON_BUCKET = Item('salmon_bucket')
COD_BUCKET = Item('cod_bucket')
TROPICAL_FISH_BUCKET = Item('tropical_fish_bucket')
BRICK = Item('brick')
CLAY_BALL = Item('clay_ball')
DRIED_KELP_BLOCK = Item('dried_kelp_block')
PAPER = Item('paper')
BOOK = Item('book')
SLIME_BALL = Item('slime_ball')
CHEST_MINECART = Item('chest_minecart')
FURNACE_MINECART = Item('furnace_minecart')
EGG = Item('egg')
COMPASS = Item('compass')
FISHING_ROD = Item('fishing_rod')
CLOCK = Item('clock')
GLOWSTONE_DUST = Item('glowstone_dust')
COD = Item('cod')
SALMON = Item('salmon')
TROPICAL_FISH = Item('tropical_fish')
PUFFERFISH = Item('pufferfish')
COOKED_COD = Item('cooked_cod')
COOKED_SALMON = Item('cooked_salmon')
INK_SAC = Item('ink_sac')
COCOA_BEANS = Item('cocoa_beans')
LAPIS_LAZULI = Item('lapis_lazuli')
WHITE_DYE = Item('white_dye')
ORANGE_DYE = Item('orange_dye')
MAGENTA_DYE = Item('magenta_dye')
LIGHT_BLUE_DYE = Item('light_blue_dye')
YELLOW_DYE = Item('yellow_dye')
LIME_DYE = Item('lime_dye')
PINK_DYE = Item('pink_dye')
GRAY_DYE = Item('gray_dye')
LIGHT_GRAY_DYE = Item('light_gray_dye')
CYAN_DYE = Item('cyan_dye')
PURPLE_DYE = Item('purple_dye')
BLUE_DYE = Item('blue_dye')
BROWN_DYE = Item('brown_dye')
GREEN_DYE = Item('green_dye')
RED_DYE = Item('red_dye')
BLACK_DYE = Item('black_dye')
BONE_MEAL = Item('bone_meal')
BONE = Item('bone')
SUGAR = Item('sugar')
CAKE = Item('cake')
WHITE_BED = Item('white_bed')
ORANGE_BED = Item('orange_bed')
MAGENTA_BED = Item('magenta_bed')
LIGHT_BLUE_BED = Item('light_blue_bed')
YELLOW_BED = Item('yellow_bed')
LIME_BED = Item('lime_bed')
PINK_BED = Item('pink_bed')
GRAY_BED = Item('gray_bed')
LIGHT_GRAY_BED = Item('light_gray_bed')
CYAN_BED = Item('cyan_bed')
PURPLE_BED = Item('purple_bed')
BLUE_BED = Item('blue_bed')
BROWN_BED = Item('brown_bed')
GREEN_BED = Item('green_bed')
RED_BED = Item('red_bed')
BLACK_BED = Item('black_bed')
COOKIE = Item('cookie')
FILLED_MAP = Item('filled_map')
SHEARS = Item('shears')
MELON_SLICE = Item('melon_slice')
DRIED_KELP = Item('dried_kelp')
PUMPKIN_SEEDS = Item('pumpkin_seeds')
MELON_SEEDS = Item('melon_seeds')
BEEF = Item('beef')
COOKED_BEEF = Item('cooked_beef')
CHICKEN = Item('chicken')
COOKED_CHICKEN = Item('cooked_chicken')
ROTTEN_FLESH = Item('rotten_flesh')
ENDER_PEARL = Item('ender_pearl')
BLAZE_ROD = Item('blaze_rod')
GHAST_TEAR = Item('ghast_tear')
GOLD_NUGGET = Item('gold_nugget')
NETHER_WART = Item('nether_wart')
POTION = Item('potion')
GLASS_BOTTLE = Item('glass_bottle')
SPIDER_EYE = Item('spider_eye')
FERMENTED_SPIDER_EYE = Item('fermented_spider_eye')
BLAZE_POWDER = Item('blaze_powder')
MAGMA_CREAM = Item('magma_cream')
BREWING_STAND = Item('brewing_stand')
CAULDRON = Item('cauldron')
ENDER_EYE = Item('ender_eye')
GLISTERING_MELON_SLICE = Item('glistering_melon_slice')
BAT_SPAWN_EGG = Item('bat_spawn_egg')
BEE_SPAWN_EGG = Item('bee_spawn_egg')
BLAZE_SPAWN_EGG = Item('blaze_spawn_egg')
CAT_SPAWN_EGG = Item('cat_spawn_egg')
CAVE_SPIDER_SPAWN_EGG = Item('cave_spider_spawn_egg')
CHICKEN_SPAWN_EGG = Item('chicken_spawn_egg')
COD_SPAWN_EGG = Item('cod_spawn_egg')
COW_SPAWN_EGG = Item('cow_spawn_egg')
CREEPER_SPAWN_EGG = Item('creeper_spawn_egg')
DOLPHIN_SPAWN_EGG = Item('dolphin_spawn_egg')
DONKEY_SPAWN_EGG = Item('donkey_spawn_egg')
DROWNED_SPAWN_EGG = Item('drowned_spawn_egg')
ELDER_GUARDIAN_SPAWN_EGG = Item('elder_guardian_spawn_egg')
ENDERMAN_SPAWN_EGG = Item('enderman_spawn_egg')
ENDERMITE_SPAWN_EGG = Item('endermite_spawn_egg')
EVOKER_SPAWN_EGG = Item('evoker_spawn_egg')
FOX_SPAWN_EGG = Item('fox_spawn_egg')
GHAST_SPAWN_EGG = Item('ghast_spawn_egg')
GUARDIAN_SPAWN_EGG = Item('guardian_spawn_egg')
HOGLIN_SPAWN_EGG = Item('hoglin_spawn_egg')
HORSE_SPAWN_EGG = Item('horse_spawn_egg')
HUSK_SPAWN_EGG = Item('husk_spawn_egg')
LLAMA_SPAWN_EGG = Item('llama_spawn_egg')
MAGMA_CUBE_SPAWN_EGG = Item('magma_cube_spawn_egg')
MOOSHROOM_SPAWN_EGG = Item('mooshroom_spawn_egg')
MULE_SPAWN_EGG = Item('mule_spawn_egg')
OCELOT_SPAWN_EGG = Item('ocelot_spawn_egg')
PANDA_SPAWN_EGG = Item('panda_spawn_egg')
PARROT_SPAWN_EGG = Item('parrot_spawn_egg')
PHANTOM_SPAWN_EGG = Item('phantom_spawn_egg')
PIG_SPAWN_EGG = Item('pig_spawn_egg')
PIGLIN_SPAWN_EGG = Item('piglin_spawn_egg')
PIGLIN_BRUTE_SPAWN_EGG = Item('piglin_brute_spawn_egg')
PILLAGER_SPAWN_EGG = Item('pillager_spawn_egg')
POLAR_BEAR_SPAWN_EGG = Item('polar_bear_spawn_egg')
PUFFERFISH_SPAWN_EGG = Item('pufferfish_spawn_egg')
RABBIT_SPAWN_EGG = Item('rabbit_spawn_egg')
RAVAGER_SPAWN_EGG = Item('ravager_spawn_egg')
SALMON_SPAWN_EGG = Item('salmon_spawn_egg')
SHEEP_SPAWN_EGG = Item('sheep_spawn_egg')
SHULKER_SPAWN_EGG = Item('shulker_spawn_egg')
SILVERFISH_SPAWN_EGG = Item('silverfish_spawn_egg')
SKELETON_SPAWN_EGG = Item('skeleton_spawn_egg')
SKELETON_HORSE_SPAWN_EGG = Item('skeleton_horse_spawn_egg')
SLIME_SPAWN_EGG = Item('slime_spawn_egg')
SPIDER_SPAWN_EGG = Item('spider_spawn_egg')
SQUID_SPAWN_EGG = Item('squid_spawn_egg')
STRAY_SPAWN_EGG = Item('stray_spawn_egg')
STRIDER_SPAWN_EGG = Item('strider_spawn_egg')
TRADER_LLAMA_SPAWN_EGG = Item('trader_llama_spawn_egg')
TROPICAL_FISH_SPAWN_EGG = Item('tropical_fish_spawn_egg')
TURTLE_SPAWN_EGG = Item('turtle_spawn_egg')
VEX_SPAWN_EGG = Item('vex_spawn_egg')
VILLAGER_SPAWN_EGG = Item('villager_spawn_egg')
VINDICATOR_SPAWN_EGG = Item('vindicator_spawn_egg')
WANDERING_TRADER_SPAWN_EGG = Item('wandering_trader_spawn_egg')
WITCH_SPAWN_EGG = Item('witch_spawn_egg')
WITHER_SKELETON_SPAWN_EGG = Item('wither_skeleton_spawn_egg')
WOLF_SPAWN_EGG = Item('wolf_spawn_egg')
ZOGLIN_SPAWN_EGG = Item('zoglin_spawn_egg')
ZOMBIE_SPAWN_EGG = Item('zombie_spawn_egg')
ZOMBIE_HORSE_SPAWN_EGG = Item('zombie_horse_spawn_egg')
ZOMBIE_VILLAGER_SPAWN_EGG = Item('zombie_villager_spawn_egg')
ZOMBIFIED_PIGLIN_SPAWN_EGG = Item('zombified_piglin_spawn_egg')
EXPERIENCE_BOTTLE = Item('experience_bottle')
FIRE_CHARGE = Item('fire_charge')
WRITABLE_BOOK = Item('writable_book')
WRITTEN_BOOK = Item('written_book')
EMERALD = Item('emerald')
ITEM_FRAME = Item('item_frame')
FLOWER_POT = Item('flower_pot')
CARROT = Item('carrot')
POTATO = Item('potato')
BAKED_POTATO = Item('baked_potato')
POISONOUS_POTATO = Item('poisonous_potato')
MAP = Item('map')
GOLDEN_CARROT = Item('golden_carrot')
SKELETON_SKULL = Item('skeleton_skull')
WITHER_SKELETON_SKULL = Item('wither_skeleton_skull')
PLAYER_HEAD = Item('player_head')
ZOMBIE_HEAD = Item('zombie_head')
CREEPER_HEAD = Item('creeper_head')
DRAGON_HEAD = Item('dragon_head')
CARROT_ON_A_STICK = Item('carrot_on_a_stick')
WARPED_FUNGUS_ON_A_STICK = Item('warped_fungus_on_a_stick')
NETHER_STAR = Item('nether_star')
PUMPKIN_PIE = Item('pumpkin_pie')
FIREWORK_ROCKET = Item('firework_rocket')
FIREWORK_STAR = Item('firework_star')
ENCHANTED_BOOK = Item('enchanted_book')
NETHER_BRICK = Item('nether_brick')
QUARTZ = Item('quartz')
TNT_MINECART = Item('tnt_minecart')
HOPPER_MINECART = Item('hopper_minecart')
PRISMARINE_SHARD = Item('prismarine_shard')
PRISMARINE_CRYSTALS = Item('prismarine_crystals')
RABBIT = Item('rabbit')
COOKED_RABBIT = Item('cooked_rabbit')
RABBIT_STEW = Item('rabbit_stew')
RABBIT_FOOT = Item('rabbit_foot')
RABBIT_HIDE = Item('rabbit_hide')
ARMOR_STAND = Item('armor_stand')
IRON_HORSE_ARMOR = Item('iron_horse_armor')
GOLDEN_HORSE_ARMOR = Item('golden_horse_armor')
DIAMOND_HORSE_ARMOR = Item('diamond_horse_armor')
LEATHER_HORSE_ARMOR = Item('leather_horse_armor')
LEAD = Item('lead')
NAME_TAG = Item('name_tag')
COMMAND_BLOCK_MINECART = Item('command_block_minecart')
MUTTON = Item('mutton')
COOKED_MUTTON = Item('cooked_mutton')
WHITE_BANNER = Item('white_banner')
ORANGE_BANNER = Item('orange_banner')
MAGENTA_BANNER = Item('magenta_banner')
LIGHT_BLUE_BANNER = Item('light_blue_banner')
YELLOW_BANNER = Item('yellow_banner')
LIME_BANNER = Item('lime_banner')
PINK_BANNER = Item('pink_banner')
GRAY_BANNER = Item('gray_banner')
LIGHT_GRAY_BANNER = Item('light_gray_banner')
CYAN_BANNER = Item('cyan_banner')
PURPLE_BANNER = Item('purple_banner')
BLUE_BANNER = Item('blue_banner')
BROWN_BANNER = Item('brown_banner')
GREEN_BANNER = Item('green_banner')
RED_BANNER = Item('red_banner')
BLACK_BANNER = Item('black_banner')
END_CRYSTAL = Item('end_crystal')
CHORUS_FRUIT = Item('chorus_fruit')
POPPED_CHORUS_FRUIT = Item('popped_chorus_fruit')
BEETROOT = Item('beetroot')
BEETROOT_SEEDS = Item('beetroot_seeds')
BEETROOT_SOUP = Item('beetroot_soup')
DRAGON_BREATH = Item('dragon_breath')
SPLASH_POTION = Item('splash_potion')
SPECTRAL_ARROW = Item('spectral_arrow')
TIPPED_ARROW = Item('tipped_arrow')
LINGERING_POTION = Item('lingering_potion')
SHIELD = Item('shield')
ELYTRA = Item('elytra')
SPRUCE_BOAT = Item('spruce_boat')
BIRCH_BOAT = Item('birch_boat')
JUNGLE_BOAT = Item('jungle_boat')
ACACIA_BOAT = Item('acacia_boat')
DARK_OAK_BOAT = Item('dark_oak_boat')
TOTEM_OF_UNDYING = Item('totem_of_undying')
SHULKER_SHELL = Item('shulker_shell')
IRON_NUGGET = Item('iron_nugget')
KNOWLEDGE_BOOK = Item('knowledge_book')
DEBUG_STICK = Item('debug_stick')
MUSIC_DISC_13 = Item('music_disc_13')
MUSIC_DISC_CAT = Item('music_disc_cat')
MUSIC_DISC_BLOCKS = Item('music_disc_blocks')
MUSIC_DISC_CHIRP = Item('music_disc_chirp')
MUSIC_DISC_FAR = Item('music_disc_far')
MUSIC_DISC_MALL = Item('music_disc_mall')
MUSIC_DISC_MELLOHI = Item('music_disc_mellohi')
MUSIC_DISC_STAL = Item('music_disc_stal')
MUSIC_DISC_STRAD = Item('music_disc_strad')
MUSIC_DISC_WARD = Item('music_disc_ward')
MUSIC_DISC_11 = Item('music_disc_11')
MUSIC_DISC_WAIT = Item('music_disc_wait')
MUSIC_DISC_PIGSTEP = Item('music_disc_pigstep')
TRIDENT = Item('trident')
PHANTOM_MEMBRANE = Item('phantom_membrane')
NAUTILUS_SHELL = Item('nautilus_shell')
HEART_OF_THE_SEA = Item('heart_of_the_sea')
CROSSBOW = Item('crossbow')
SUSPICIOUS_STEW = Item('suspicious_stew')
LOOM = Item('loom')
FLOWER_BANNER_PATTERN = Item('flower_banner_pattern')
CREEPER_BANNER_PATTERN = Item('creeper_banner_pattern')
SKULL_BANNER_PATTERN = Item('skull_banner_pattern')
MOJANG_BANNER_PATTERN = Item('mojang_banner_pattern')
GLOBE_BANNER_PATTERN = Item('globe_banner_pattern')
PIGLIN_BANNER_PATTERN = Item('piglin_banner_pattern')
COMPOSTER = Item('composter')
BARREL = Item('barrel')
SMOKER = Item('smoker')
BLAST_FURNACE = Item('blast_furnace')
CARTOGRAPHY_TABLE = Item('cartography_table')
FLETCHING_TABLE = Item('fletching_table')
GRINDSTONE = Item('grindstone')
LECTERN = Item('lectern')
SMITHING_TABLE = Item('smithing_table')
STONECUTTER = Item('stonecutter')
BELL = Item('bell')
LANTERN = Item('lantern')
SOUL_LANTERN = Item('soul_lantern')
SWEET_BERRIES = Item('sweet_berries')
CAMPFIRE = Item('campfire')
SOUL_CAMPFIRE = Item('soul_campfire')
SHROOMLIGHT = Item('shroomlight')
HONEYCOMB = Item('honeycomb')
BEE_NEST = Item('bee_nest')
BEEHIVE = Item('beehive')
HONEY_BOTTLE = Item('honey_bottle')
HONEY_BLOCK = Item('honey_block')
HONEYCOMB_BLOCK = Item('honeycomb_block')
LODESTONE = Item('lodestone')
NETHERITE_BLOCK = Item('netherite_block')
ANCIENT_DEBRIS = Item('ancient_debris')
TARGET = Item('target')
CRYING_OBSIDIAN = Item('crying_obsidian')
BLACKSTONE = Item('blackstone')
BLACKSTONE_SLAB = Item('blackstone_slab')
BLACKSTONE_STAIRS = Item('blackstone_stairs')
GILDED_BLACKSTONE = Item('gilded_blackstone')
POLISHED_BLACKSTONE = Item('polished_blackstone')
POLISHED_BLACKSTONE_SLAB = Item('polished_blackstone_slab')
POLISHED_BLACKSTONE_STAIRS = Item('polished_blackstone_stairs')
CHISELED_POLISHED_BLACKSTONE = Item('chiseled_polished_blackstone')
POLISHED_BLACKSTONE_BRICKS = Item('polished_blackstone_bricks')
POLISHED_BLACKSTONE_BRICK_SLAB = Item('polished_blackstone_brick_slab')
POLISHED_BLACKSTONE_BRICK_STAIRS = Item('polished_blackstone_brick_stairs')
CRACKED_POLISHED_BLACKSTONE_BRICKS = Item('cracked_polished_blackstone_bricks')
RESPAWN_ANCHOR = Item('respawn_anchor')
| 0.381104 | 0.040541 |
import os
import shutil
import sys
from typing import List, Union, Callable
import functools
import importlib.util
def require(pkg_name) -> Callable:
"""Returns a decorator function, ensures pkg_name is available and can be imported.
Parameters
----------
pkg_name: str
Name of the package required.
Returns
-------
deco_require: Callable
Decorator function
Raises
------
ModuleNotFoundError
When pkg_name is not found.
    Examples
--------
@require("some_pkg")
def foo(...):
...
"""
def deco_require(func):
@functools.wraps(func)
def inner_func(*args, **kwargs):
if not which_import(pkg_name, return_bool=True):
raise ModuleNotFoundError(f"Could not find or import {pkg_name}.")
return func(*args, **kwargs)
return inner_func
return deco_require
def which_import(
module: str,
*,
return_bool: bool = False,
raise_error: bool = False,
raise_msg: str = None,
package: str = None,
namespace_ok: bool = False,
) -> Union[bool, None, str, List[str]]:
"""Tests to see if a Python module is available.
Returns
-------
str or None
By default, returns `__init__.py`-like path if `module` found or `None` if not.
For namespace packages and if `namespace_ok=True`, returns the list of pieces locations if `module` found or `None` if not.
bool
When `return_bool=True`, returns whether or not found.
Namespace packages only `True` if `namespace_ok=True`.
Raises
------
ModuleNotFoundError
When `raise_error=True` and module not found. Raises generic message plus any `raise_msg`.
"""
try:
module_spec = importlib.util.find_spec(module, package=package)
except ModuleNotFoundError:
module_spec = None
# module_spec.origin is 'namespace' for py36, None for >=py37
namespace_package = module_spec is not None and module_spec.origin in [
None,
"namespace",
]
if (module_spec is None) or (namespace_package and not namespace_ok):
if raise_error:
raise ModuleNotFoundError(
f"Python module '{module}' not found in envvar PYTHONPATH.{' ' + raise_msg if raise_msg else ''}"
)
elif return_bool:
return False
else:
return None
else:
if return_bool:
return True
else:
if namespace_package:
return module_spec.submodule_search_locations
else:
return module_spec.origin
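# Hedged usage sketch (not part of the original module): demonstrates the
# return modes of which_import and the require() decorator defined above.
# "json" is used only because it ships with the standard library.
if __name__ == "__main__":
    print(which_import("json"))                    # filesystem origin, or None if absent
    print(which_import("json", return_bool=True))  # plain True/False probe
    @require("json")
    def _demo() -> str:
        return "json is importable"
    print(_demo())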
def which(
command: str,
*,
return_bool: bool = False,
raise_error: bool = False,
raise_msg: str = None,
env: str = None,
) -> Union[bool, None, str]:
"""Test to see if a command is available.
Returns
-------
str or None
By default, returns command path if command found or `None` if not.
Environment is $PATH or `os.pathsep`-separated `env`, less any None values.
bool
When `return_bool=True`, returns whether or not found.
Raises
------
ModuleNotFoundError
        When `raise_error=True` and command not found. Raises generic message plus any `raise_msg`.
"""
if env is None:
lenv = {
"PATH": os.pathsep
+ os.environ.get("PATH", "")
            + os.pathsep
            + os.path.dirname(sys.executable)
}
else:
lenv = {
"PATH": os.pathsep.join(
[os.path.abspath(x) for x in env.split(os.pathsep) if x != ""]
)
}
lenv = {k: v for k, v in lenv.items() if v is not None}
ans = shutil.which(command, mode=os.F_OK | os.X_OK, path=lenv["PATH"])
if raise_error and ans is None:
raise ModuleNotFoundError(
f"Command '{command}' not found in envvar PATH.{' ' + raise_msg if raise_msg else ''}"
)
if return_bool:
return bool(ans)
else:
return ans
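# Hedged usage sketch (not in the original file): "python3" is only an
# illustrative command name; on systems without it, both calls simply
# report that the command is absent.
if __name__ == "__main__":
    print(which("python3"))                    # resolved path, or None
    print(which("python3", return_bool=True))  # boolean probe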
def safe_version(*args, **kwargs) -> str:
"""
    pkg_resources is slow to import, so it is loaded lazily inside this wrapper.
"""
import pkg_resources
return pkg_resources.safe_version(*args, **kwargs)
def parse_version(*args, **kwargs):
"""
    pkg_resources is slow to import, so it is loaded lazily inside this wrapper.
"""
import pkg_resources
return pkg_resources.parse_version(*args, **kwargs)
|
cmselemental/util/importing.py
| 0.61115 | 0.306929 |
import numpy as np
import cvxpy as cp
import scipy.linalg
import scipy.optimize
import matplotlib.pyplot as plt
from pytope import Polytope
if __name__ == '__main__':
from generate_invariant_set import invariant_set
else:
from envs.generate_invariant_set import invariant_set
import torch
#%%%
XX = np.array([[-.5,0],[0,7.5],[.6,5],[.95,-7.5]])
def vertices(A,B,E,X,U,D,h,env_name):
# Generate the matrices Y and V.
# The columns of V are the vertices of the action polytope at each vertex of the invariant set.
# The columns of Y are the vertices of the invariant set, repeated once for each corresponding vertex of the action polytope.
# The set {x: Fx @ x <= gx} describes the target set: u must be chosen such that Fx @ (Ax + Bu) <= gx. This set is smaller than the invariant set in order to account for disturbances.
# The set {x: Fi @ x <= gi} is the actual invariant set.
# Generate invariant and target set:
Fx,gx,Fi,gi = invariant_set(A,B,E,X,U,D,h,env_name)
S_targ = Polytope(A = Fx, b = gx)
S_safe = Polytope(A = Fi, b = gi)
# Get dimensions:
p = np.shape(S_safe.V)[0]
n,m = np.shape(B)
# Matrix whose columns are vertices of invariant set:
Y = (S_safe.V).T
YY = Y
if __name__ == '__main__':
plt.figure(3,figsize=(8,4),dpi=500)
plt.subplot(122)
plt.plot(U.V,[0,0],'-ok',label='U',linewidth=3)
plt.autoscale(enable=True)
# Build V matrix and expand Y matrix:
V = np.zeros((m,p))
for i,x in enumerate(list(YY.T)):
x = np.reshape(x,(n,1))
Ui_H = np.block([[Fx@B,gx - Fx@A@x],[U.A,U.b]])
Ui = Polytope(A = Ui_H[:,:-1],b = Ui_H[:,-1])
qi = np.shape(Ui.V)[0] # Number of vertices of Ui
Y_new_i = np.tile(np.reshape(Y[:,i],(n,1)),(1,qi))
if i == 0:
V = Ui.V.T
Y_new = Y_new_i
else:
V = np.append(V,Ui.V.T,axis = 1)
Y_new = np.append(Y_new,Y_new_i,axis = 1)
if __name__ == '__main__':
for i,x in enumerate(list(XX)):
x = np.reshape(x,(n,1))
Ui_H = np.block([[Fx@B,gx - Fx@A@x],[U.A,U.b]])
Ui = Polytope(A = Ui_H[:,:-1],b = Ui_H[:,-1])
plt.figure(3)
plt.subplot(122)
if i == 0:
plt.plot(Ui.V,(i+1)*np.ones(len(Ui.V)),'-bo',label=r'$\Omega(x_i)$',linewidth=3)
else:
plt.plot(Ui.V,(i+1)*np.ones(len(Ui.V)),'-bo',linewidth=3)
Y = Y_new
p = np.shape(Y)[1]
Y = torch.tensor(Y).type(torch.FloatTensor)
V = torch.tensor(V).type(torch.FloatTensor)
if __name__ == '__main__':
return Y,V,YY,S_safe
else:
return Y,V,Fx,gx,Fi,gi
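# Hedged illustration (not in the original file): the columns of Y and V are
# paired so that any state x in the invariant set can be written x = Y @ a with
# a >= 0 and sum(a) == 1, and V @ a is then an admissible action at x (this is
# exactly the parameterization used by the linprog calls in the __main__ block
# below). The helper name _barycentric_action is illustrative only.
def _barycentric_action(Y_np, V_np, x):
    # Find any feasible convex combination a with Y_np @ a = x, then map it through V_np.
    p = Y_np.shape[1]
    res = scipy.optimize.linprog(
        c=np.zeros(p),
        A_eq=np.vstack([Y_np, np.ones((1, p))]),
        b_eq=np.append(x, 1.0),
        bounds=(0, None),
    )
    return V_np @ res.x if res.success else None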
if __name__ == '__main__':
def parameters_power_system_2():
max_speed = 8
max_action = 15.
dt = .05
g = -1.
m = 1.
l = 1.
safe_th = 1. # safe region [-1, 1]
env_name = 'power_system_2'
d = 0.1 # damping
# Linearized dynamics:
A = np.array([[1,dt],[0,1-dt*d]]) # Linear portion of dynamics
C = 3*g/(2*l) * np.array([[dt**2],[dt]])@np.array([[1,0]]) # Linearized nonlinear portion of dynamics
A = A + C
B = 3/(m*l**2) * np.array([[dt**2],[dt]]) # Control input
E = 3*g/(2*l) * np.array([[dt**2],[dt]]) # Linearization error disturbance input
# State and input bounds:
noise_max = .5
d_max = safe_th - np.sin(safe_th) + noise_max # Max linearization error inside safe set, plus noise
# Constraint sets:
X = Polytope(lb = (-safe_th,-max_speed),ub = (safe_th,max_speed)) # Safe set
U = Polytope(lb = -max_action, ub = max_action) # Control set
D = Polytope(lb = -d_max, ub = d_max) # Disturbance set
return A,B,E,X,U,D,dt,env_name
def parameters_pendulum():
# Parameters:
h = .05
g = 10.
m = 1.
l = 1.
env_name = 'pendulum'
# Linearized dynamics:
A = np.array([[1,h],[0,1]]) # Linear portion of dynamics
C = 3*g/(2*l) * np.array([[h**2],[h]])@np.array([[1,0]]) # Linearized nonlinear portion of dynamics
A = A + C # Linearized dynamics
B = 3/(m*l**2) * np.array([[h**2],[h]]) # Control input
E = 3*g/(2*l) * np.array([[h**2],[h]]) # Linearization error disturbance input
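        # For reference, the discrete-time model being linearized is the semi-implicit update
        #   omega_{k+1} = omega_k + h*(3*g/(2*l))*sin(theta_k) + h*(3/(m*l**2))*u_k
        #   theta_{k+1} = theta_k + h*omega_{k+1},
        # with sin(theta) replaced by theta; the linearization error is pushed into E and D.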
# State and input bounds:
theta_max = 1. # Max angle
omega_max = 8 # Max speed
u_max = 15 # Max control
noise_max = 0
d_max = theta_max - np.sin(theta_max) + noise_max # Max linearization error inside safe set, plus noise
# Constraints sets:
X = Polytope(lb = (-theta_max,-omega_max),ub = (theta_max,omega_max)) # Safe set
U = Polytope(lb = -u_max, ub = u_max) # Control set
D = Polytope(lb = -d_max, ub = d_max) # Disturbance set
return A,B,E,X,U,D,h,env_name
A,B,E,X,U,D,h,env_name = parameters_power_system_2()
Y,V,YY,S = vertices(A,B,E,X,U,D,h,env_name)
print(np.round(Y,2))
p = Y.size()[1]
z = np.ones((1,p))
for i,x in enumerate(list(XX)):
x = np.reshape(x,(2,1))
vmin = scipy.optimize.linprog(c=V.numpy().flatten(),A_eq = np.block([[Y.numpy()],[z]]),b_eq = np.block([[x],[1]]),bounds=(0,None)).fun
vmax = -scipy.optimize.linprog(c=-V.numpy().flatten(),A_eq = np.block([[Y.numpy()],[z]]),b_eq = np.block([[x],[1]]),bounds=(0,None)).fun
plt.figure(3)
plt.subplot(122)
if i == 0:
plt.plot([vmin,vmax],(i+1)*np.ones(2),'--ro',label = r'$V(x_i)$',linewidth=3)
else:
plt.plot([vmin,vmax],(i+1)*np.ones(2),'--ro',linewidth=3)
plt.legend(fontsize=15)
plt.figure(3)
plt.subplot(121)
X.plot(alpha = 0.5,color = (0,1,0),label = 'X')
S.plot(alpha=0.5,color = (0,0,1),label = 'S')
plt.xlabel('Angle (rad)',fontsize=25)
plt.ylabel('Frequency (rad/sec)',fontsize=25)
plt.title('Safe and invariant sets',fontsize=25)
plt.xticks(fontsize=20)
plt.yticks([-8,-4,0,4,8],fontsize=20)
plt.subplot(121)
plt.plot(XX[:,0],XX[:,1],'kd',label=r'$x_i$')
plt.legend(fontsize=15)
plt.annotate(r'$x_1$',.05+XX[0,:],fontsize=20)
plt.annotate(r'$x_2$',np.array([0,-1.5])+XX[1,:],fontsize=20)
plt.annotate(r'$x_3$',np.array([0,-2.])+XX[2,:],fontsize=20)
plt.annotate(r'$x_4$',np.array([-.3,.3])+XX[3,:],fontsize=20)
plt.subplot(122)
plt.xlabel('Control input',fontsize=25)
plt.ylabel('Sample point',fontsize=25)
plt.title('Sample action sets',fontsize=25)
plt.yticks(ticks = [0,1,2,3,4],labels=['U',r'$x_1$',r'$x_2$',r'$x_3$',r'$x_4$'],fontsize=20)
plt.xticks([-15,0,15],fontsize=20)
plt.tight_layout()
'''a_1 = torch.rand((p,1))**5
a_1 = a_1/torch.norm(a_1,p=1)
a_1_traj = a_1
plt.figure(3)
for i in range(10):
a_1 = (torch.eye(p) - [email protected]([email protected])@Y) @ a_1
a_1 = a_1 + [email protected]([email protected])@x
a_1 = torch.maximum(a_1,torch.zeros((p,1)))
#a_1 = a_1/torch.norm(a_1,p=1)
a_1 = a_1 + z/p*(1-torch.sum(a_1))
a_1_traj = torch.cat((a_1_traj,a_1),dim = 1)
plt.plot(a_1_traj.T)'''
def newton_step(Y,a,x,t):
n,p = np.shape(Y)
z = np.ones(p)
P = [email protected]([email protected])
#g = Y.T@Y@a - Y.T@x + np.ones((p,p))@a - z - 1/t * np.diag(1/a) @ z
g = P@Y@a - P@x + np.ones((p,p))@a - z - 1/t * np.diag(1/a) @ z
#Z = np.block([[P@Y],[z.T]])
Z1 = np.block([P,np.ones((p,1))])
Z2 = np.block([[Y],[np.ones((1,p))]])
Dinv = np.diag(a**2)
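        # The Hessian here has the low-rank-plus-diagonal form H = (1/t)*diag(1/a**2) + Z1 @ Z2,
        # so Hinv below applies the Woodbury identity,
        #   Hinv = t*Dinv - t**2 * Dinv @ Z1 @ inv(I + t*Z2 @ Dinv @ Z1) @ Z2 @ Dinv,
        # which only inverts an (n+1) x (n+1) matrix instead of a p x p one.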
Hinv = t*Dinv - t**2*Dinv@[email protected](np.eye(n+1) + t*Z2@Dinv@Z1)@Z2@Dinv
da_nt = -Hinv@g
return a + .25*da_nt
Y = Y.numpy()
P = [email protected]([email protected])
x = np.array([.6,5])
a = np.random.rand(p)
a = a**10
a = a/sum(a)
penalty_traj = [np.linalg.norm(P@(Y@a-x))]
for t in np.logspace(2,7,15):
for j in range(3):
a = newton_step(Y,a,x,t)
penalty_traj.append(np.linalg.norm(P@(Y@a-x)))
plt.figure()
plt.semilogy(penalty_traj)
|
Cyclic_projections/envs/generate_vertices.py
| 0.539954 | 0.466481 |
import os
import unittest
from functools import partial
from textwrap import dedent
from typing import Dict, List, Optional
from pants.base.build_environment import get_buildroot
from pants.option.option_value_container import OptionValueContainer
from pants.option.options_bootstrapper import OptionsBootstrapper
from pants.option.scope import ScopeInfo
from pants.util.contextutil import temporary_dir, temporary_file, temporary_file_path
from pants.util.logging import LogLevel
class OptionsBootstrapperTest(unittest.TestCase):
@staticmethod
def _config_path(path: Optional[str]) -> List[str]:
if path is None:
return ["--pants-config-files=[]"]
return [f"--pants-config-files=['{path}']"]
def assert_bootstrap_options(
self,
*,
config: Optional[Dict[str, str]] = None,
env: Optional[Dict[str, str]] = None,
args: Optional[List[str]] = None,
**expected_entries,
) -> None:
with temporary_file(binary_mode=False) as fp:
fp.write("[DEFAULT]\n")
if config:
for k, v in config.items():
fp.write(f"{k} = {repr(v)}\n")
fp.close()
args = [*self._config_path(fp.name), *(args or [])]
bootstrapper = OptionsBootstrapper.create(env=env or {}, args=args, allow_pantsrc=False)
vals = bootstrapper.get_bootstrap_options().for_global_scope()
vals_dict = {k: getattr(vals, k) for k in expected_entries}
self.assertEqual(expected_entries, vals_dict)
def test_bootstrap_seed_values(self) -> None:
def assert_seed_values(
*,
config: Optional[Dict[str, str]] = None,
env: Optional[Dict[str, str]] = None,
args: Optional[List[str]] = None,
workdir: Optional[str] = None,
supportdir: Optional[str] = None,
distdir: Optional[str] = None,
) -> None:
self.assert_bootstrap_options(
config=config,
env=env,
args=args,
pants_workdir=workdir or os.path.join(get_buildroot(), ".pants.d"),
pants_supportdir=supportdir or os.path.join(get_buildroot(), "build-support"),
pants_distdir=distdir or os.path.join(get_buildroot(), "dist"),
)
# Check for valid default seed values
assert_seed_values()
# Check getting values from config, env and args.
assert_seed_values(
config={"pants_workdir": "/from_config/.pants.d"},
workdir="/from_config/.pants.d",
)
assert_seed_values(
env={"PANTS_SUPPORTDIR": "/from_env/build-support"},
supportdir="/from_env/build-support",
)
assert_seed_values(args=["--pants-distdir=/from_args/dist"], distdir="/from_args/dist")
# Check that args > env > config.
assert_seed_values(
config={
"pants_workdir": "/from_config/.pants.d",
"pants_supportdir": "/from_config/build-support",
"pants_distdir": "/from_config/dist",
},
env={"PANTS_SUPPORTDIR": "/from_env/build-support", "PANTS_DISTDIR": "/from_env/dist"},
args=["--pants-distdir=/from_args/dist"],
workdir="/from_config/.pants.d",
supportdir="/from_env/build-support",
distdir="/from_args/dist",
)
# Check that unrelated args and config don't confuse us.
assert_seed_values(
config={
"pants_workdir": "/from_config/.pants.d",
"pants_supportdir": "/from_config/build-support",
"pants_distdir": "/from_config/dist",
"unrelated": "foo",
},
env={
"PANTS_SUPPORTDIR": "/from_env/build-support",
"PANTS_DISTDIR": "/from_env/dist",
"PANTS_NO_RELATIONSHIP": "foo",
},
args=["--pants-distdir=/from_args/dist", "--foo=bar", "--baz"],
workdir="/from_config/.pants.d",
supportdir="/from_env/build-support",
distdir="/from_args/dist",
)
def test_bootstrap_bool_option_values(self) -> None:
# Check the default.
self.assert_bootstrap_options(pantsrc=True)
assert_pantsrc_is_false = partial(self.assert_bootstrap_options, pantsrc=False)
assert_pantsrc_is_false(args=["--no-pantsrc"])
assert_pantsrc_is_false(config={"pantsrc": "false"})
assert_pantsrc_is_false(env={"PANTS_PANTSRC": "False"})
def test_create_bootstrapped_options(self) -> None:
# Check that we can set a bootstrap option from a cmd-line flag and have that interpolate
# correctly into regular config.
with temporary_file(binary_mode=False) as fp:
fp.write(
dedent(
"""
[foo]
bar = "%(pants_workdir)s/baz"
[fruit]
apple = "%(pants_supportdir)s/banana"
"""
)
)
fp.close()
args = ["--pants-workdir=/qux"] + self._config_path(fp.name)
bootstrapper = OptionsBootstrapper.create(
env={"PANTS_SUPPORTDIR": "/pear"}, args=args, allow_pantsrc=False
)
opts = bootstrapper.get_full_options(
known_scope_infos=[
ScopeInfo(""),
ScopeInfo("foo"),
ScopeInfo("fruit"),
]
)
# So we don't choke on these on the cmd line.
opts.register("", "--pants-workdir")
opts.register("", "--pants-config-files")
opts.register("foo", "--bar")
opts.register("fruit", "--apple")
self.assertEqual("/qux/baz", opts.for_scope("foo").bar)
self.assertEqual("/pear/banana", opts.for_scope("fruit").apple)
def test_bootstrapped_options_ignore_irrelevant_env(self) -> None:
included = "PANTS_SUPPORTDIR"
excluded = "NON_PANTS_ENV"
bootstrapper = OptionsBootstrapper.create(
env={excluded: "pear", included: "banana"}, args=[], allow_pantsrc=False
)
self.assertIn(included, bootstrapper.env)
self.assertNotIn(excluded, bootstrapper.env)
def test_create_bootstrapped_multiple_pants_config_files(self) -> None:
"""When given multiple config files, the later files should take precedence when options
conflict."""
def create_options_bootstrapper(*config_paths: str) -> OptionsBootstrapper:
return OptionsBootstrapper.create(
env={},
args=[f"--pants-config-files={cp}" for cp in config_paths],
allow_pantsrc=False,
)
def assert_config_read_correctly(
options_bootstrapper: OptionsBootstrapper,
*,
expected_worker_count: int,
) -> None:
options = options_bootstrapper.get_full_options(
known_scope_infos=[
ScopeInfo(""),
ScopeInfo("compile.apt"),
ScopeInfo("fruit"),
],
)
# So we don't choke on these on the cmd line.
options.register("", "--pants-config-files", type=list)
options.register("", "--config-override", type=list)
options.register("compile.apt", "--worker-count")
options.register("fruit", "--apple")
self.assertEqual(
str(expected_worker_count), options.for_scope("compile.apt").worker_count
)
self.assertEqual("red", options.for_scope("fruit").apple)
with temporary_file(binary_mode=False) as fp1, temporary_file(binary_mode=False) as fp2:
fp1.write(
dedent(
"""\
[compile.apt]
worker_count = 1
[fruit]
apple = "red"
"""
)
)
fp2.write(
dedent(
"""\
[compile.apt]
worker_count = 2
"""
)
)
fp1.close()
fp2.close()
assert_config_read_correctly(
create_options_bootstrapper(fp1.name),
expected_worker_count=1,
)
assert_config_read_correctly(
create_options_bootstrapper(fp1.name, fp2.name),
expected_worker_count=2,
)
assert_config_read_correctly(
create_options_bootstrapper(fp2.name, fp1.name),
expected_worker_count=1,
)
def test_options_pantsrc_files(self) -> None:
def create_options_bootstrapper(*config_paths: str) -> OptionsBootstrapper:
return OptionsBootstrapper.create(
env={},
args=[f"--pantsrc-files={cp}" for cp in config_paths],
allow_pantsrc=True,
)
with temporary_file(binary_mode=False) as fp:
fp.write(
dedent(
"""
[resolver]
resolver = "coursier"
"""
)
)
fp.close()
bootstrapped_options = create_options_bootstrapper(fp.name)
opts_single_config = bootstrapped_options.get_full_options(
known_scope_infos=[
ScopeInfo(""),
ScopeInfo("resolver"),
]
)
opts_single_config.register("", "--pantsrc-files", type=list)
opts_single_config.register("resolver", "--resolver")
self.assertEqual("coursier", opts_single_config.for_scope("resolver").resolver)
def test_full_options_caching(self) -> None:
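        # The assertions below rely on the cache treating known_scope_infos as an
        # unordered, de-duplicated collection: reordering or repeating scopes yields
        # the same memoized Options instance, while a different scope set does not.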
with temporary_file_path() as config:
args = self._config_path(config)
bootstrapper = OptionsBootstrapper.create(env={}, args=args, allow_pantsrc=False)
opts1 = bootstrapper.get_full_options(
known_scope_infos=[
ScopeInfo(""),
ScopeInfo("foo"),
]
)
opts2 = bootstrapper.get_full_options(
known_scope_infos=[
ScopeInfo("foo"),
ScopeInfo(""),
]
)
assert opts1 is opts2
opts3 = bootstrapper.get_full_options(
known_scope_infos=[
ScopeInfo(""),
ScopeInfo("foo"),
ScopeInfo(""),
]
)
assert opts1 is opts3
opts4 = bootstrapper.get_full_options(known_scope_infos=[ScopeInfo("")])
assert opts1 is not opts4
opts5 = bootstrapper.get_full_options(known_scope_infos=[ScopeInfo("")])
assert opts4 is opts5
assert opts1 is not opts5
def test_bootstrap_short_options(self) -> None:
def parse_options(*args: str) -> OptionValueContainer:
full_args = [*args, *self._config_path(None)]
return (
OptionsBootstrapper.create(env={}, args=full_args, allow_pantsrc=False)
.get_bootstrap_options()
.for_global_scope()
)
# No short options passed - defaults presented.
vals = parse_options()
self.assertIsNone(vals.logdir)
self.assertEqual(LogLevel.INFO, vals.level)
# Unrecognized short options passed and ignored - defaults presented.
vals = parse_options("-_UnderscoreValue", "-^")
self.assertIsNone(vals.logdir)
self.assertEqual(LogLevel.INFO, vals.level)
vals = parse_options("-d/tmp/logs", "-ldebug")
self.assertEqual("/tmp/logs", vals.logdir)
self.assertEqual(LogLevel.DEBUG, vals.level)
def test_bootstrap_options_passthrough_dup_ignored(self) -> None:
def parse_options(*args: str) -> OptionValueContainer:
full_args = [*args, *self._config_path(None)]
return (
OptionsBootstrapper.create(env={}, args=full_args, allow_pantsrc=False)
.get_bootstrap_options()
.for_global_scope()
)
vals = parse_options("main", "args", "-d/tmp/frogs", "--", "-d/tmp/logs")
self.assertEqual("/tmp/frogs", vals.logdir)
vals = parse_options("main", "args", "--", "-d/tmp/logs")
self.assertIsNone(vals.logdir)
def test_bootstrap_options_explicit_config_path(self) -> None:
def config_path(*args, **env):
return OptionsBootstrapper.get_config_file_paths(env, args)
self.assertEqual(
["/foo/bar/pants.toml"],
config_path("main", "args", "--pants-config-files=['/foo/bar/pants.toml']"),
)
self.assertEqual(
["/from/env1", "/from/env2"],
config_path("main", "args", PANTS_CONFIG_FILES="['/from/env1', '/from/env2']"),
)
self.assertEqual(
["/from/flag"],
config_path(
"main",
"args",
"-x",
"--pants-config-files=['/from/flag']",
"goal",
"--other-flag",
PANTS_CONFIG_FILES="['/from/env']",
),
)
# Test appending to the default.
self.assertEqual(
[f"{get_buildroot()}/pants.toml", "/from/env", "/from/flag"],
config_path(
"main",
"args",
"-x",
"--pants-config-files=+['/from/flag']",
"goal",
"--other-flag",
PANTS_CONFIG_FILES="+['/from/env']",
),
)
# Test replacing the default, then appending.
self.assertEqual(
["/from/env", "/from/flag"],
config_path(
"main",
"args",
"-x",
"--pants-config-files=+['/from/flag']",
"goal",
"--other-flag",
PANTS_CONFIG_FILES="['/from/env']",
),
)
self.assertEqual(
["/from/flag"],
config_path(
"main",
"args",
"-x",
"--pants-config-files=['/from/flag']",
"goal",
"--other-flag",
PANTS_CONFIG_FILES="+['/from/env']",
),
)
def test_setting_pants_config_in_config(self) -> None:
# Test that setting pants_config in the config file has no effect.
with temporary_dir() as tmpdir:
config1 = os.path.join(tmpdir, "config1")
config2 = os.path.join(tmpdir, "config2")
with open(config1, "w") as out1:
out1.write(f"[DEFAULT]\npants_config_files = ['{config2}']\nlogdir = 'logdir1'\n")
with open(config2, "w") as out2:
out2.write("[DEFAULT]\nlogdir = 'logdir2'\n")
ob = OptionsBootstrapper.create(
env={}, args=[f"--pants-config-files=['{config1}']"], allow_pantsrc=False
)
logdir = ob.get_bootstrap_options().for_global_scope().logdir
self.assertEqual("logdir1", logdir)
|
src/python/pants/option/options_bootstrapper_test.py
| 0.787114 | 0.238151 |
import requests
import time
import pandas as pd
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from bs4 import BeautifulSoup
import json
url = 'https://stats.nba.com/players/traditional/?PerMode=Totals&Season=2019-20&SeasonType=Regular%20Season&sort=PLAYER_NAME&dir=-1'
options = Options()
driver = webdriver.Chrome()
driver.get(url)
driver.implicitly_wait(5)
# Open the site in the browser
top10 = {}
rankings = {
'points': {'field': 'PTS', 'label': 'PTS'},
'assistants': {'field': 'AST', 'label': 'AST'},
'rebounds': {'field': 'REB', 'label': 'REB'},
'steals': {'field': 'STL', 'label': 'STL'},
'blocks': {'field': 'BLK', 'label': 'BLK'},
'vitorias': {'field': 'W','label':'W'}
}
# Define the ranking categories and their table fields
def buildrank(type):
field = rankings[type]['field']
label = rankings[type]['label']
    try:
        driver.find_element_by_xpath("/html/body/div[3]/div[3]/div/div/div[2]/div/div/button").click()
        time.sleep(3)
    except Exception:
        pass
    # Accept the cookie banner if it is shown (it may only appear on the first page load)
    driver.find_element_by_xpath(f"/html/body/main/div[2]/div/div[2]/div/div/nba-stat-table/div[2]/div[1]/table/thead/tr/th[@data-field='{field}']").click()  # TODO: fix this
    # Click the stat column header so the table is sorted by that ranking
element = driver.find_element_by_xpath(f'/html/body/main/div[2]/div/div[2]/div/div/nba-stat-table/div[2]/div[1]/table')
    # Locate the table element in the HTML
html_content = element.get_attribute('outerHTML')
    # Grab the element's outer HTML
soup = BeautifulSoup(html_content, 'html.parser')
    # Parse the page HTML
table = soup.find(name='table')
    # Find the table
    df_full = pd.read_html(str(table))[0].head(10)
df = df_full[['Unnamed: 0', 'PLAYER','TEAM',label]]
df.columns = ['Posicao', 'Jogador', 'Time', 'Pontos']
    # Build the data frame
return df.to_dict('records')
    # Return the rows as a list of dicts
for r in rankings:
top10[r] = buildrank(r)
driver.quit()
with open('ranking.json', 'w', encoding='utf-8') as jp:
js = json.dumps(top10, indent=4)
jp.write(js)
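# Hedged compatibility note (not part of the original script): the
# find_element_by_xpath helpers used above were removed in Selenium 4.3+.
# On current Selenium releases the equivalent call looks like:
#   from selenium.webdriver.common.by import By
#   driver.find_element(By.XPATH, "//table")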
|
exemplo_02/Exemplo_02.py
| 0.250546 | 0.078184 |
from api.drivers.student import student_drivers
from api.middlewares import authentication_middleware
from api.schemas.admin.admin_request_schema import admin_request_schemas
from api.schemas.student.request_schemas import student_request_schemas
from api.schemas.student.response_schemas import student_response_schemas
from api.utils.exceptions import exceptions
from fastapi import APIRouter, Depends, HTTPException, Request, status
from fastapi.responses import JSONResponse
from api.repository import admin_repo
import json
def construct_router():
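    """Builds the admin router: student subscription management, OTP
    verification, ban/delete, and listing endpoints."""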
admin = APIRouter(tags=["Admin"])
@admin.post("/notify/student")
async def notify_by_batch():
pass
@admin.post("/add/student/subscription")
async def add_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().update_array_of_str(
request.__dict__
)
return JSONResponse(status_code=200, content={"message": "info updated"})
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/remove/student/subscription")
async def remove_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().delete_from_array_of_str(
request.__dict__
)
if response:
return JSONResponse(
status_code=200,
content={"message": "subscription deleted successfully"},
)
return JSONResponse(
status_code=500, content={"message": "subscription deletion failed"}
)
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/verify/student")
async def verify_student(request: Request):
request = await request.json()
response = await admin_repo.assign_otp(request["student_ids"])
if response:
return JSONResponse(
status_code=200, content={"message": "otp assigned successfully"}
)
return JSONResponse(
status_code=500,
content={
"message": """otp cannot be assigned successfully for all student"""
},
)
@admin.get("/ban/student/{student_id}")
async def ban_student_account(student_id: str):
response = await student_drivers.Student().ban_student(student_id)
if response == "already_banned":
return JSONResponse(
                status_code=404, content={"message": "student already banned"}
)
elif response:
return JSONResponse(
status_code=200, content={"message": "student banned successfully"}
)
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.delete("/delete/student/{student_id}")
async def delete_student_account(student_id: str):
response = await student_drivers.Student().delete_student(student_id)
if response:
return JSONResponse(
status_code=200, content={"message": "student deleted successfully"}
)
return JSONResponse(
status_code=404, content={"message": "student does not exist"}
)
@admin.get("/all_student")
async def get_student_profile():
try:
response = await (
student_drivers.Student().get_all_students()
)
return JSONResponse(
status_code=200,
content=response
)
        except Exception as e:
            print(e, "exception")
            return JSONResponse(
                status_code=500, content={"message": "internal server error"}
            )
return admin
|
api/routes/admin/admin_student_routes.py
| 0.404743 | 0.060975 |
import pytest
from distutils.version import LooseVersion
from f5.bigip.resource import MissingRequiredCreationParameter
from f5.bigip.tm.security.nat import Destination_Translation
from f5.bigip.tm.security.nat import Policy
from f5.bigip.tm.security.nat import Rule
from f5.bigip.tm.security.nat import Source_Translation
from f5.sdk_exception import ExclusiveAttributesPresent
from requests.exceptions import HTTPError
DESC = 'TESTADDED'
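# The fixtures below create a temporary NAT object (source translation,
# destination translation, policy, or policy rule) on the device under test
# and delete it again once the test function has finished.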
@pytest.fixture(scope='function')
def srctranslation(mgmt_root):
s1 = mgmt_root.tm.security.nat.source_translations.source_translation.create(
name='fake_src', partition='Common', addresses=['192.168.3.11', '172.16.17.32'], ports=['1025-65535'], type='dynamic-pat')
yield s1
s1.delete()
@pytest.fixture(scope='function')
def dsttranslation(mgmt_root):
d1 = mgmt_root.tm.security.nat.destination_translations.destination_translation.create(
partition='Common', name='fake_dst', addresses=['192.168.3.11', '172.16.17.32'], ports=['1025-65535'], type='static-pat')
yield d1
d1.delete()
@pytest.fixture(scope='function')
def policy(mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
yield p1
p1.delete()
@pytest.fixture(scope='function')
def rule(mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
param_set = {'name': 'fake_rule', 'place-after': 'last'}
rule1 = rule_lst.rule.create(**param_set)
yield rule1
rule1.delete()
p1.delete()
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('12.1.0'),
reason='This collection is fully implemented on 12.1.0 or greater.'
)
class TestSrcTranslation(object):
def test_create_missing_mandatory_attr_raises(self, mgmt_root):
s1 = mgmt_root.tm.security.nat.source_translations.source_translation
with pytest.raises(HTTPError) as err:
s1.create(name='fail', partition='Common', type='dynamic-pat')
assert err.value.response.status_code == 400
def test_create_req_args(self, srctranslation):
s1 = srctranslation
URI = 'https://localhost/mgmt/tm/security/nat/source-translation/~Common~fake_src'
assert s1.name == 'fake_src'
assert s1.partition == 'Common'
assert s1.selfLink.startswith(URI)
assert s1.kind == 'tm:security:nat:source-translation:source-translationstate'
assert not hasattr(s1, 'description')
def test_create_opt_args(self, mgmt_root):
s1 = mgmt_root.tm.security.nat.source_translations.source_translation.create(
name='fake_src', partition='Common', addresses=['192.168.3.11', '172.16.17.32'], ports=['1025-65535'], type='dynamic-pat')
URI = 'https://localhost/mgmt/tm/security/nat/source-translation/~Common~fake_src'
assert s1.name == 'fake_src'
assert s1.partition == 'Common'
assert s1.selfLink.startswith(URI)
s1.modify(description=DESC)
assert hasattr(s1, 'description')
assert s1.description == DESC
s1.delete()
def test_refresh(self, mgmt_root, srctranslation):
sc = mgmt_root.tm.security.nat.source_translations
s1 = srctranslation
s2 = sc.source_translation.load(name='fake_src', partition='Common')
assert s1.name == s2.name
assert s1.kind == s2.kind
assert s1.selfLink == s2.selfLink
assert not hasattr(s1, 'description')
assert not hasattr(s2, 'description')
s2.modify(description=DESC)
assert hasattr(s2, 'description')
assert s2.description == DESC
s1.refresh()
assert s1.selfLink == s2.selfLink
assert hasattr(s1, 'description')
assert s1.description == s2.description
def test_delete(self, mgmt_root):
src = mgmt_root.tm.security.nat.source_translations
s1 = src.source_translation.create(name='fake_src', partition='Common', addresses=['192.168.3.11', '172.16.17.32'], ports=['1025-65535'], type='dynamic-pat')
s1.delete()
with pytest.raises(HTTPError) as err:
src.source_translation.load(partition='Common', name='fake_src')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
src = mgmt_root.tm.security.nat.source_translations
with pytest.raises(HTTPError) as err:
src.source_translation.load(partition='Common', name='not_exists')
assert err.value.response.status_code == 404
def test_load_and_update(self, mgmt_root, srctranslation):
s1 = srctranslation
URI = 'https://localhost/mgmt/tm/security/nat/source-translation/~Common~fake_src'
assert s1.name == 'fake_src'
assert s1.partition == 'Common'
assert s1.selfLink.startswith(URI)
assert not hasattr(s1, 'description')
s1.description = DESC
s1.update()
assert hasattr(s1, 'description')
assert s1.description == DESC
sc = mgmt_root.tm.security.nat.source_translations
s2 = sc.source_translation.load(partition='Common', name='fake_src')
assert s1.name == s2.name
assert s1.partition == s2.partition
assert s1.selfLink == s2.selfLink
assert hasattr(s2, 'description')
assert s1.description == s2.description
def test_src_translation_collection(self, mgmt_root, srctranslation):
s1 = srctranslation
URI = 'https://localhost/mgmt/tm/security/nat/source-translation/~Common~fake_src'
assert s1.name == 'fake_src'
assert s1.partition == 'Common'
assert s1.selfLink.startswith(URI)
src = mgmt_root.tm.security.nat.source_translations.get_collection()
assert isinstance(src, list)
assert len(src)
assert isinstance(src[0], Source_Translation)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('12.1.0'),
reason='This collection is fully implemented on 12.1.0 or greater.'
)
class TestDstTranslation(object):
def test_create_missing_mandatory_attr_raises(self, mgmt_root):
d1 = mgmt_root.tm.security.nat.destination_translations.destination_translation
with pytest.raises(HTTPError) as err:
d1.create(name='fail', partition='Common', type='static-nat')
assert err.value.response.status_code == 400
d2 = mgmt_root.tm.security.nat.destination_translations.destination_translation
with pytest.raises(HTTPError) as err:
d2.create(name='fail', partition='Common', type='static-pat')
assert err.value.response.status_code == 400
def test_create_req_args(self, dsttranslation):
d1 = dsttranslation
URI = 'https://localhost/mgmt/tm/security/' \
'nat/destination-translation/~Common~fake_dst'
assert d1.name == 'fake_dst'
assert d1.partition == 'Common'
assert d1.selfLink.startswith(URI)
assert d1.kind == 'tm:security:nat:destination-translation:destination-translationstate'
assert not hasattr(d1, 'description')
def test_create_opt_args(self, mgmt_root):
d1 = mgmt_root.tm.security.nat.destination_translations.destination_translation.create(
partition='Common', name='fake_dst', addresses=['192.168.3.11', '192.168.3.11'], ports=['1025-65535'], type='static-pat')
URI = 'https://localhost/mgmt/tm/security/' \
'nat/destination-translation/~Common~fake_dst'
assert d1.name == 'fake_dst'
assert d1.partition == 'Common'
assert d1.selfLink.startswith(URI)
d1.modify(description=DESC)
assert hasattr(d1, 'description')
assert d1.description == DESC
d1.delete()
def test_refresh(self, mgmt_root, dsttranslation):
d1 = dsttranslation
dst = mgmt_root.tm.security.nat.destination_translations
d2 = dst.destination_translation.load(
name='fake_dst', partition='Common')
assert d1.name == d2.name
assert d1.partition == d2.partition
assert d1.kind == d2.kind
assert d1.selfLink == d2.selfLink
assert not hasattr(d1, 'description')
assert not hasattr(d2, 'description')
d2.modify(description=DESC)
assert hasattr(d2, 'description')
assert d2.description == DESC
d1.refresh()
assert d1.selfLink == d2.selfLink
assert hasattr(d1, 'description')
assert d1.description == d2.description
def test_delete(self, mgmt_root):
dst = mgmt_root.tm.security.nat.destination_translations
d1 = dst.destination_translation.create(
partition='Common', name='fake_dst', addresses=['192.168.3.11', '192.168.3.11'], ports=['1025-65535'], type='static-pat')
d1.delete()
with pytest.raises(HTTPError) as err:
dst.destination_translation.load(partition='Common', name='fake_dst')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
dst = mgmt_root.tm.security.nat.destination_translations
with pytest.raises(HTTPError) as err:
dst.destination_translation.load(partition='Common', name='not_exists')
assert err.value.response.status_code == 404
def test_load_and_update(self, mgmt_root, dsttranslation):
d1 = dsttranslation
URI = 'https://localhost/mgmt/tm/security/' \
'nat/destination-translation/~Common~fake_dst'
assert d1.name == 'fake_dst'
assert d1.partition == 'Common'
assert d1.selfLink.startswith(URI)
assert not hasattr(d1, 'description')
d1.description = DESC
d1.update()
assert hasattr(d1, 'description')
assert d1.description == DESC
dst = mgmt_root.tm.security.nat.destination_translations
d2 = dst.destination_translation.load(partition='Common', name='fake_dst')
assert d1.name == d2.name
assert d1.partition == d2.partition
assert d1.kind == d2.kind
assert d1.selfLink == d2.selfLink
assert hasattr(d2, 'description')
assert d1.description == d2.description
def test_dst_translation_collection(self, mgmt_root, dsttranslation):
d1 = dsttranslation
URI = 'https://localhost/mgmt/tm/security/' \
'nat/destination-translation/~Common~fake_dst'
assert d1.name == 'fake_dst'
assert d1.partition == 'Common'
assert d1.selfLink.startswith(URI)
dst = mgmt_root.tm.security.nat.destination_translations.get_collection()
assert isinstance(dst, list)
assert len(dst)
assert isinstance(dst[0], Destination_Translation)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('12.1.0'),
reason='This collection is fully implemented on 12.1.0 or greater.'
)
class TestRules(object):
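    # NAT policy rules live in the policy's rules_s subcollection. Creating a
    # rule requires exactly one of 'place-after'/'place-before'; the first two
    # tests assert the SDK errors raised when that contract is violated.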
def test_mutually_exclusive_raises(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
param_set = {'name': 'fake_rule', 'place-after': 'first',
'action': 'reject', 'place-before': 'last'}
ERR = 'Mutually exclusive arguments submitted. The following arguments cannot be set together: "place-after, place-before".'
with pytest.raises(ExclusiveAttributesPresent) as err:
rule_lst.rule.create(**param_set)
assert str(err.value) == ERR
p1.delete()
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
param_set = {'name': 'fake_rule', 'action': 'reject'}
ERR = "This resource requires at least one of the mandatory additional parameters to be provided: place-after, place-before"
with pytest.raises(MissingRequiredCreationParameter) as err:
rule_lst.rule.create(**param_set)
assert str(err.value) == ERR
p1.delete()
def test_create_req_arg(self, rule):
r1 = rule
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy/rules/fake_rule'
assert r1.name == 'fake_rule'
assert r1.selfLink.startswith(URI)
assert not hasattr(r1, 'description')
def test_create_optional_args(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
param_set = {'name': 'fake_rule', 'action': 'reject', 'place-after': 'first', 'description': DESC}
r1 = rule_lst.rule.create(**param_set)
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy/rules/fake_rule'
assert r1.name == 'fake_rule'
assert r1.selfLink.startswith(URI)
assert r1.kind == 'tm:security:nat:policy:rules:rulesstate'
assert r1.description == DESC
r1.delete()
p1.delete()
def test_refresh(self, rule, mgmt_root):
r1 = rule
rc = mgmt_root.tm.security.nat.policy_s.policy.load(
name='fake_policy', partition='Common')
rule_lst = rc.rules_s
r2 = rule_lst.rule.load(name='fake_rule')
assert r1.name == r2.name
assert r1.selfLink == r2.selfLink
assert r1.kind == r2.kind
assert not hasattr(r1, 'description')
assert not hasattr(r2, 'description')
r2.modify(description=DESC)
assert hasattr(r2, 'description')
assert r2.description == DESC
r1.refresh()
def test_delete(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
param_set = {'name': 'delete_me', 'place-after': 'first'}
r1 = rule_lst.rule.create(**param_set)
r1.delete()
with pytest.raises(HTTPError) as err:
rule_lst.rule.load(name='delete_me')
assert err.value.response.status_code == 404
p1.delete()
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy', partition='Common')
rule_lst = p1.rules_s
with pytest.raises(HTTPError) as err:
rule_lst.rule.load(name='not_exist')
assert err.value.response.status_code == 404
p1.delete()
def test_load_and_update(self, rule, mgmt_root):
r1 = rule
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy/rules/fake_rule'
assert r1.name == 'fake_rule'
assert r1.selfLink.startswith(URI)
assert not hasattr(r1, 'description')
r1.description = DESC
r1.update()
assert hasattr(r1, 'description')
assert r1.description == DESC
rc = mgmt_root.tm.security.nat.policy_s.policy.load(name='fake_policy', partition='Common')
rule_lst = rc.rules_s
r2 = rule_lst.rule.load(name='fake_rule')
assert r1.name == r2.name
assert r1.selfLink == r2.selfLink
assert hasattr(r2, 'description')
assert r1.description == r2.description
def test_rules_subcollection(self, rule, mgmt_root):
r1 = rule
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy/rules/fake_rule'
assert r1.name == 'fake_rule'
assert r1.selfLink.startswith(URI)
assert not hasattr(r1, 'description')
nat_policy = mgmt_root.tm.security.nat.policy_s.policy.load(name='fake_policy', partition='Common')
rule_list = nat_policy.rules_s
rc = rule_list.get_collection()
assert isinstance(rc, list)
assert len(rc)
assert isinstance(rc[0], Rule)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('12.1.0'),
reason='This collection is fully implemented on 12.1.0 or greater.'
)
class TestPolicy(object):
def test_create_req_args(self, mgmt_root):
p1 = mgmt_root.tm.security.nat.policy_s.policy.create(
name='fake_policy1', partition='Common')
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy'
assert p1.name == 'fake_policy1'
assert p1.partition == 'Common'
assert p1.selfLink.startswith(URI)
assert not hasattr(p1, 'description')
p1.delete()
def test_refresh(self, mgmt_root, policy):
p1 = policy
p2 = mgmt_root.tm.security.nat.policy_s.policy.load(
name='fake_policy', partition='Common')
assert p1.name == p2.name
assert p1.kind == p2.kind
assert p1.selfLink == p2.selfLink
assert not hasattr(p1, 'description')
assert not hasattr(p2, 'description')
p2.modify(description=DESC)
p1.modify(description=DESC)
assert hasattr(p2, 'description')
assert p2.description == DESC
p1.refresh()
assert p1.selfLink == p2.selfLink
assert hasattr(p1, 'description')
assert p1.description == p2.description
def test_delete(self, mgmt_root):
p = mgmt_root.tm.security.nat.policy_s.policy
p1 = p.create(name='delete_me', partition='Common')
p1.delete()
with pytest.raises(HTTPError) as err:
mgmt_root.tm.security.nat.policy_s.policy.load(
name='delete_me', partition='Common')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
p = mgmt_root.tm.security.nat.policy_s.policy
with pytest.raises(HTTPError) as err:
p.load(name='not_exists', partition='Common')
assert err.value.response.status_code == 404
def test_load_and_update(self, mgmt_root, policy):
p1 = policy
URI = 'https://localhost/mgmt/tm/security/' \
'nat/policy/~Common~fake_policy'
assert p1.name == 'fake_policy'
assert p1.partition == 'Common'
assert p1.selfLink.startswith(URI)
assert not hasattr(p1, 'description')
p1.description = DESC
p1.update()
assert hasattr(p1, 'description')
assert p1.description == DESC
p = mgmt_root.tm.security.nat.policy_s.policy
p2 = p.load(name='fake_policy', partition='Common')
assert p1.name == p2.name
assert p1.partition == p2.partition
assert p1.selfLink == p2.selfLink
assert hasattr(p2, 'description')
assert p1.description == p2.description
def test_policies_collection(self, mgmt_root, policy):
pc = mgmt_root.tm.security.nat.policy_s.get_collection()
assert isinstance(pc, list)
assert len(pc)
assert isinstance(pc[0], Policy)
|
f5/bigip/tm/security/test/functional/test_nat.py
| 0.569853 | 0.322953 |
from unittest import TestCase
from profile_generator.unit import Point
from .gamma import (
_exp,
_inverse_exp,
_inverse_linear,
_inverse_sqrt,
_linear,
_sqrt,
exp,
inverse_exp,
inverse_linear,
inverse_sqrt,
linear,
sqrt,
)
_GREY = Point(87 / 255, 119 / 255)
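# Reference grey point for the tests below: x (87/255) is the sample input the
# fitted curves should map to 0.5, and y (119/255) is the output the inverse
# curves should produce for an input of 0.5.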
class GammaTest(TestCase):
def test_gamma_linear(self) -> None:
gamma = _linear(2)
inverse = _inverse_linear(2)
self.assertAlmostEqual(0, inverse(gamma(0)))
self.assertAlmostEqual(0.5, inverse(gamma(0.5)))
self.assertAlmostEqual(1, inverse(gamma(1)))
def test_gamma_of_linear(self) -> None:
gamma = linear(_GREY.x, 0.5)[0]
self.assertAlmostEqual(gamma(_GREY.x), 0.5)
def test_gamma_of_inverse_linear(self) -> None:
gamma_inverse = inverse_linear(0.5, _GREY.y)[0]
self.assertAlmostEqual(gamma_inverse(0.5), _GREY.y)
def test_gamma_sqrt(self) -> None:
gamma = _sqrt(2)
inverse = _inverse_sqrt(2)
self.assertAlmostEqual(0, inverse(gamma(0)))
self.assertAlmostEqual(0.5, inverse(gamma(0.5)))
self.assertAlmostEqual(1, inverse(gamma(1)))
def test_gamma_of_sqrt(self) -> None:
gamma = sqrt(_GREY.x, 0.5)
self.assertAlmostEqual(gamma(_GREY.x), 0.5)
def test_gamma_of_inverse_sqrt(self) -> None:
gamma_inverse = inverse_sqrt(0.5, _GREY.y)
self.assertAlmostEqual(gamma_inverse(0.5), _GREY.y)
def test_gamma_exp(self) -> None:
gamma = _exp(2)
inverse = _inverse_exp(-2)
self.assertAlmostEqual(0, inverse(gamma(0)))
self.assertAlmostEqual(0.5, inverse(gamma(0.5)))
self.assertAlmostEqual(1, inverse(gamma(1)))
def test_gamma_of_exp(self) -> None:
gamma = exp(_GREY.x, 0.5)[0]
self.assertAlmostEqual(gamma(_GREY.x), 0.5)
def test_gamma_of_inverse_exp(self) -> None:
gamma_inverse = inverse_exp(0.5, _GREY.y)[0]
self.assertAlmostEqual(gamma_inverse(0.5), _GREY.y)
|
profile_generator/model/gamma_test.py
| 0.890675 | 0.905322 |
'''
Created on 23.06.2010
@author: <NAME>
model description of a rope consisting of n pendulums as a benchmark test
'''
'''
Mass = 4.91 grams
Center of mass: ( millimeters )
    X = -0.00
    Y = 0.00
    Z = -46.18
Moments of inertia: ( gram * square millimeter )
Referred to the center of mass, aligned with the output coordinate system.
    Lxx = 876.78 Lxy = 0.00 Lxz = 0.62
    Lyx = 0.00 Lyy = 3633.74 Lyz = 0.00
    Lzx = 0.62 Lzy = 0.00 Lzz = 2777.40
Mass = 0.74 grams
Center of mass: ( millimeters )
    X = 0.00
    Y = 0.00
    Z = -22.32
Moments of inertia: ( gram * square millimeter )
Referred to the center of mass, aligned with the output coordinate system.
    Lxx = 27.92 Lxy = 0.00 Lxz = 0.00
    Lyx = 0.00 Lyy = 110.24 Lyz = 0.00
    Lzx = 0.00 Lzy = 0.00 Lzz = 83.08
2: 1018
3: 2250
4: 4714
5: 9642
6: 19498
7: 39210
8: 78634
'''
d = 0.02 # Rotational damping (overridden further down before the model is assembled)
N = 8.0 # Recursion depth
from PyMbs.Input import *
import os
from time import *
global globali
globali = 0
def getName():
global globali
globali = globali+1
return globali-1
def addBody(i,l,prebody,world,scale,m,c_z,I_xx,I_yy,I_zz):
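    # Recursively assembles the rope: attaches this body to 'prebody' with a
    # revolute joint (Ry), adds an STL visualisation and a damping torque
    # -d*s[1] on the joint, then recurses into the body's 'left' and 'right'
    # frames with halved length/scale and scaled mass/inertia until depth N.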
if (i > N):
return
name = getName()
# Create Body and Frame
body = world.addBody(name='part%s'%name, mass=m, cg=[0,0,c_z], inertia=diag([I_xx,I_yy,I_zz]))
body.addFrame(name='left', p=[l*0.5,0,-l*0.7])
body.addFrame(name='right', p=[-l*0.5,0,-l*0.7])
# Create Joint
joint = world.addJoint(name='rot%s_'%name, CS1=prebody, CS2=body, dofList='Ry')
# Nice Graphics
world.addVisualisation.File(body, 'RopeBody.stl', scale=scale)
# Add Damping
s = world.addSensor.Joint(symbol='state%s_'%name, joint=joint, name='Sensor%s_'%name)
D = world.addExpression(name='Friction%s_'%name, symbol_str='T%s_'%name, exp=-d*s[1])
world.addLoad.Joint(joint=joint, symbol=D, name='Load%s_'%name)
addBody(i+1,l/2.0,body.left,world,scale/2.0,m*0.15071283095723015,c_z*0.4833261152013859,I_xx*0.03184379205730058,I_yy*0.030337888786759647,I_zz*0.029912868150068408)
addBody(i+1,l/2.0,body.right,world,scale/2.0,m*0.15071283095723015,c_z*0.4833261152013859,I_xx*0.03184379205730058,I_yy*0.030337888786759647,I_zz*0.029912868150068408)
world=MbsSystem([0,0,-1])
# Parameters
l = 100
m = 4.91
c_z = -46.18
I_xx = 876.78
I_yy = 3633.74
I_zz = 2777.40
d = 0.2 # Rotational damping (this is the value in effect when addBody is called below)
addBody(0,l,world,world,1000,m,c_z,I_xx,I_yy,I_zz)
print('System has been assembled (n:%s)'%int(N))
world.genEquations.OrderN(graphOptimizations=True)
world.genCode.Modelica('MultiRope%s_OrderN'%int(N), './Output')
#world.show('MultiRope%s'%int(N))
|
examples/misc/multi_rope.py
| 0.496094 | 0.316581 |
import logging
from genomic_operations.dts.single_pos import PSEQPos, GeminiPos, TwoColPos
class Sniff(object):
"""
    Creates an ordered list of sniffer methods that are used to try to
    sniff the datatype of arbitrary files.
"""
def __init__(self):
self.sniffer_list = []
def add_sniffer_method(self, method, sniff_class):
self.sniffer_list.append([method,sniff_class])
def sniff_datatype(self, file_input):
for sniffer, sniff_class in self.sniffer_list:
sniff_result = sniffer.sniff_file(file_input)
if sniff_result.is_type:
return SniffReturnObject(sniff_result,sniff_class)
return None
class AbstractSnifferMethod(object):
"""
Abstract class that represents a file type Sniffer for genomics data.
    This is currently limited to SNP data but could be extended to any kind of data.
"""
def sniff_file(self, input_file):
"""
        Must be overridden by subclasses; this is the key method of an abstract sniffer.
        Returns a SniffResult indicating whether the file matches this datatype.
"""
raise NotImplementedError
class SniffResult(object):
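    """
    Outcome of a sniff attempt: 'truth' records whether the file matched the
    datatype, and 'header' optionally carries the column names found in it.
    """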
def __init__(self, truth, header=None):
self.truth = truth
self._header = header
@property
def is_type(self):
return self.truth
@property
def has_header(self):
return self._header is not None
@property
def header(self):
return self._header
@header.setter
def header(self, value):
self._header = value
class SniffReturnObject(SniffResult):
def __init__(self, sniff_result, sniffer_class):
super(SniffReturnObject, self).__init__(sniff_result.truth, sniff_result.header)
self._sniffer_class = sniffer_class
@property
def sniffer_class(self):
return self._sniffer_class
@sniffer_class.setter
def sniffer_class(self, value):
self._sniffer_class = value
class PSEQSniffer(AbstractSnifferMethod):
def sniff_file(self, input_file):
header=None
with open(input_file) as in_file:
for line in in_file:
s_line = line.split()
if s_line[0] == "VAR":
header = s_line[1:]
continue
if 'chr' in s_line[0] and ':' in s_line[0]:
                    return SniffResult(True, header)
return SniffResult(False)
# Adjust gemini having
class GeminiSniffer(AbstractSnifferMethod):
def sniff_file(self, input_file):
header = None
with open(input_file) as in_file:
for line in in_file:
s_line = line.split()
if s_line[0] == 'chrom':
header = s_line[3:]
continue
if 'chr' in s_line[0]:
try:
start = int(s_line[1])
end = int(s_line[2])
except ValueError:
return SniffResult(False)
if (end - start) == 1:
return SniffResult(True, header)
return SniffResult(False)
class TwoColSniffer(AbstractSnifferMethod):
def sniff_file(self, input_file):
header = None
with open(input_file) as in_file:
for line in in_file:
s_line = line.split()
if s_line[0] == "chr":
header = s_line[3:]
continue
if 'chr' in s_line[0]:
try:
int(s_line[1])
return SniffResult(True, header)
except ValueError:
pass
return SniffResult(False)
def setup_sniffers():
"""
Creates sniffers for genomic datasets.
    The order matters here: sniffers are tried in order and the first match wins.
"""
sniffer = Sniff()
sniffer.add_sniffer_method(PSEQSniffer(), PSEQPos)
sniffer.add_sniffer_method(GeminiSniffer(), GeminiPos)
sniffer.add_sniffer_method(TwoColSniffer(), TwoColPos)
return sniffer
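# Illustrative usage (the file name here is hypothetical):
#   sniffer = setup_sniffers()
#   result = sniffer.sniff_datatype('variants.txt')
#   if result is not None:
#       print(result.sniffer_class, result.has_header)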
if __name__ == "__main__":
import doctest
doctest.testmod()
|
genomic_operations/sniff/sniffer.py
| 0.672977 | 0.36886 |
import keras
import numpy as np
from skimage import io
import matplotlib.pyplot as plt
from keras.layers import Input, Dense, Reshape
from keras.layers import BatchNormalization, Activation, ZeroPadding2D
from keras.models import Sequential, Model
from keras.optimizers import Adam
# Load the target Model and make it untrainable
target_model = keras.models.load_model('./model.h5')
target_model.trainable = False
# Create the fake-ID-generator network. It takes as input the same kind of
# vector that the target network would output (in our case, 10 different digits)
attack_vector = Input(shape=(10,))
# Yes, it's perfectly enough to have a single dense layer. We only want to create
# a single image. We don't care about overfitting or generalisation or anything.
attack_model = Dense(28 * 28, activation='relu', input_dim=10)(attack_vector)
attack_img = Reshape((28, 28, 1))(attack_model)
attack_model = Model(attack_vector, attack_img)
# Now, we combine both models. Attack Network -> Target Network
target_output = target_model(attack_img)
combined_model = Model(attack_vector, target_output)
combined_model.compile(loss='binary_crossentropy', optimizer=Adam(0.0002, 0.5))
# Time to train. 1000 epochs is probably way overkill, but just to make
# sure it works for everyone. It's super fast anyway
batch_size = 128
total_epochs = 1000
# Create the target "access granted" vector. In our case that means that
# Digit 4 is set to 1. We added some minor randomness (0.9 - 1.0) just for
# good measure
final_target = np.zeros((batch_size, 10))
for i in range(batch_size):
final_target[i][4] = 0.9 + np.random.random() * 0.1
for x in range(total_epochs):
combined_model.train_on_batch(final_target, final_target)
if x % (int(total_epochs / 10)) == 0:
print('Epoch ' + str(x) + ' / ' + str(total_epochs))
# The model is trained, let's generate the fake-ID and save it!
# Don't worry if it doesn't look anything like a digit 4, it will still work
fake_id = attack_model.predict(final_target)
fake_id = np.asarray(fake_id[0])
fake_id = np.reshape(fake_id, (28, 28))
# The scipy.misc.toimage() function was deprecated in Scipy 1.0.0, and was completely removed in version 1.3.0.
io.imsave('./fake_id.png', fake_id)
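# Optional sanity check (sketch; the 'probe' reshape below is an assumption,
# not part of the original exercise): feed the crafted image back through the
# frozen target model and confirm it is classified as digit 4.
#
#     probe = fake_id.reshape(1, 28, 28, 1)
#     print(np.argmax(target_model.predict(probe), axis=-1))  # expected: [4]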
|
2_ExtractingInformation/solution_2_0.py
|
import keras
import numpy as np
from skimage import io
import matplotlib.pyplot as plt
from keras.layers import Input, Dense, Reshape
from keras.layers import BatchNormalization, Activation, ZeroPadding2D
from keras.models import Sequential, Model
from keras.optimizers import Adam
# Load the target Model and make it untrainable
target_model = keras.models.load_model('./model.h5')
target_model.trainable = False
# Create the fake-ID-generator network. It takes as input the same kind of
# vector that the target network would output (in our case, 10 different digits)
attack_vector = Input(shape=(10,))
attack_model = Sequential()
# Yes, it's perfectly enough to have a single dense layer. We only want to create
# a single image. We don't care about overfitting or generalisation or anything.
attack_model = Dense(28 * 28, activation='relu', input_dim=10)(attack_vector)
attack_img = Reshape((28, 28, 1))(attack_model)
attack_model = Model(attack_vector, attack_img)
# Now, we combine both models. Attack Network -> Target Network
target_output = target_model(attack_img)
combined_model = Model(attack_vector, target_output)
combined_model.compile(loss='binary_crossentropy', optimizer=Adam(0.0002, 0.5))
# Time to train. 1000 epochs is probably way overkill, but just to make
# sure it works for everyone. It's super fast anyway
batch_size = 128
total_epochs = 1000
# Create the target "access granted" vector. In our case that means that
# Digit 4 is set to 1. We added some minor randomness (0.9 - 1.0) just for
# good measure
final_target = np.zeros((batch_size, 10))
for i in range(batch_size):
final_target[i][4] = 0.9 + np.random.random() * 0.1
for x in range(total_epochs):
combined_model.train_on_batch(final_target, final_target)
if x % (int(total_epochs / 10)) == 0:
print('Epoch ' + str(x) + ' / ' + str(total_epochs))
# The model is trained, let's generate the fake-ID and save it!
# Don't worry if it doesn't look anything like a digit 4, it will still work
fake_id = attack_model.predict(final_target)
fake_id = np.asarray(fake_id[0])
fake_id = np.reshape(fake_id, (28, 28))
# The scipy.misc.toimage() function was deprecated in Scipy 1.0.0, and was completely removed in version 1.3.0.
io.imsave('./fake_id.png', fake_id)
| 0.792263 | 0.416322 |
from __future__ import print_function
import logging
import sys
import os
import cPickle
import numpy as np
from scipy.sparse import dok_matrix
from scipy.io import mmwrite, mmread
import text_entail.dictionary as td
import text_entail.io as tio
def w1Asfeature(d_triples, d_w1):
"""
"""
w1_mat = dok_matrix((len(d_triples), len(d_triples._m2ids)))
for w1, ids in d_triples._m2ids.items():
j = d_w1.add(w1)
for i in ids:
w1_mat[i,j] = 1
return w1_mat
def w2Asfeature(d_triples, d_w2):
"""
"""
w2_mat = dok_matrix((len(d_triples), len(d_triples._r2ids)))
for w2, ids in d_triples._r2ids.items():
j = d_w2.add(w2)
for i in ids:
w2_mat[i,j] = 1
return w2_mat
def ctxAsfeature(d_triples, d_ctx):
"""
"""
ctx_mat = dok_matrix((len(d_triples), len(d_triples._l2ids)))
for ctx, ids in d_triples._l2ids.items():
j = d_ctx.add(ctx)
for i in ids:
ctx_mat[i,j] = 1
return ctx_mat
def binarize_sparse_matrix(mat):
"""
"""
logging.info('binarizing feature matrix')
mat = mat.astype(bool)
mat = mat.astype(np.float64)
logging.info('finished binarizing feature matrix')
return mat
def pred_vectors_with_context(preds_file, has_header=True):
"""
"""
logging.info("creating predicate pairs class vector '{}'".format(preds_file))
temp = []
xy_predl_predr_entail = tio.read_preds_w_ctx(preds_file, has_header=has_header)
d_triples = td.TripleDict() # rows
duplicates = 0
contradicting_duplicates = 0
for ctx_X, ctx_Y, pred_l, pred_r, entailing in xy_predl_predr_entail:
ctx = '{}\t{}'.format(ctx_X, ctx_Y)
i = d_triples.add((ctx, pred_l, pred_r))
if i < len(temp):
label = 1 if entailing.strip().lower() == 'true' else 0
print("omitting duplicate example: '{} {} {} {}' ".format(ctx, pred_l, pred_r, entailing) ,file=sys.stderr)
duplicates += 1
if temp[i] != label:
print("duplicate example has different label: '{}' vs. '{}'".format(temp[i], label) ,file=sys.stderr)
contradicting_duplicates += 1
else:
temp.append(1 if entailing.strip().lower() == 'true' else 0)
vec = np.array(temp, dtype=np.float64)
logging.info("finished creating arg pairs class vector '{}'".format(preds_file))
logging.info("found {} duplicate examples with {} having contradicting labels.".format(duplicates, contradicting_duplicates))
return vec, d_triples
def arg_l_arg_r_pairs_vector(args_file, file_contains_context=False, has_header=True):
"""
"""
logging.info("creating arg pairs class vector '{}'".format(args_file))
temp = []
if file_contains_context:
ctx_argl_argr_entail = tio.read_args_w_ctx(args_file, has_header=has_header)
else:
argl_argr_entail = tio.read_args_wo_ctx(args_file, has_header=has_header)
def append_empty_context(tuples):
for l,r,e in tuples:
yield '', l, r, e
ctx_argl_argr_entail = append_empty_context(argl_argr_entail)
d_triples = td.TripleDict() # rows
duplicates = 0
contradicting_duplicates = 0
for ctx, arg_l, arg_r, entailing in ctx_argl_argr_entail:
i = d_triples.add((ctx, arg_l, arg_r))
if i < len(temp):
label = 1 if entailing.strip().lower() == 'true' else 0
print("omitting duplicate example: '{} {} {} {}' ".format(ctx, arg_l, arg_r, entailing) ,file=sys.stderr)
duplicates += 1
if temp[i] != label:
print("duplicate example has different label: '{}' vs. '{}'".format(temp[i], label) ,file=sys.stderr)
contradicting_duplicates += 1
else:
temp.append(1 if entailing.strip().lower() == 'true' else 0)
vec = np.array(temp, dtype=np.float64)
logging.info("finished creating arg pairs class vector '{}'".format(args_file))
logging.info("found {} duplicate examples with {} having contradicting labels.".format(duplicates, contradicting_duplicates))
return vec, d_triples
def arg_l_arg_r_asjo_matrix(
row_indices,
jb_file,
num_rows,
col_indices,
transform_w1 = lambda w1 : (w1[:w1.find('::@')], w1[w1.find('@::')+3:]),
transform_w2sig = lambda w2sig : w2sig,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( jb_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = jb_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(jb_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
with open(mm_file+'i','r') as f:
col_indices._id2w = cPickle.load(f)
for i, w in enumerate(col_indices._id2w):
col_indices._w2id[w] = i
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating arg pair feature matrix '{}'".format(jb_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # len(d_pairs) = number of rows
j_bs = tio.read_jb_file_filter_by_jo(jb_file, lambda jo : transform_w1(jo) in row_indices)
for j, bs in j_bs:
ks = row_indices[transform_w1(j)]
for b, s in transform_w2sig(bs):
l = col_indices.add(b)
if mat.shape[1] <= l:
mat.resize((mat.shape[0],l+1))
for k in ks:
mat[k,l] = float(s)
logging.info("finished creating arg pair feature matrix '{}'".format(jb_file))
logging.info("saving matrix to '{}'.".format(mm_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
with open(mm_file+'i','w') as f:
cPickle.dump(col_indices._id2w, f)
logging.info("finshed saving matrix")
return mat
def arg_asjo_matrix(
row_indices,
col_indices,
jb_file,
num_rows,
transform_w1 = lambda w1 : w1,
transform_w2sig = lambda w2sig : w2sig,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( jb_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = jb_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(jb_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
with open(mm_file+'i','r') as f:
col_indices._id2w = cPickle.load(f)
for i, w in enumerate(col_indices._id2w):
col_indices._w2id[w] = i
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating arg feature matrix '{}'".format(jb_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
j_bs = tio.read_jb_file_filter_by_jo(jb_file, lambda jo : transform_w1(jo) in row_indices)
for j, bs in j_bs:
j = transform_w1(j)
ks = row_indices[j]
for b, s in transform_w2sig(bs):
l = col_indices.add(b)
if mat.shape[1] <= l:
mat.resize((mat.shape[0],l+1))
for k in ks:
mat[k,l] = float(s)
logging.info("finished creating arg feature matrix '{}'".format(jb_file))
logging.info("saving matrix to '{}'.".format(mm_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
with open(mm_file+'i','w') as f:
cPickle.dump(col_indices._id2w, f)
logging.info("finshed saving matrix")
return mat
def arg_to_topic_matrix(
args,
word2topic_file,
num_rows,
transform_w = lambda w: w,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( word2topic_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = word2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(word2topic_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic feature matrix '{}'".format(word2topic_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicfile(word2topic_file)
for w, t in w2t:
w = transform_w(w)
if not w in args:
continue
ks = args[w]
if mat.shape[1] <= t:
mat.resize((mat.shape[0],t+1))
for k in ks:
mat[k,t] = 1
logging.info("finished creating topic feature matrix '{}'".format(word2topic_file))
logging.info("saving matrix to '{}'.".format(word2topic_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
def arg_l_arg_r_to_topic_matrix(
row_indices,
pair2topic_file,
num_rows,
transform_w = lambda w1 : (w1[:w1.find('::@')], w1[w1.find('@::')+3:]),
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( pair2topic_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = pair2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(pair2topic_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic feature matrix '{}'".format(pair2topic_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicfile(pair2topic_file)
for w, t in w2t:
p = transform_w(w)
if p not in row_indices:
continue
ks = row_indices[p]
if mat.shape[1] <= t:
mat.resize((mat.shape[0],t+1))
for k in ks:
mat[k,t] = 1
logging.info("finished creating topic feature matrix '{}'".format(pair2topic_file))
logging.info("saving matrix to '{}'.".format(pair2topic_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
def topic_vector_matrix(
row_indices,
word2topicvector_file,
num_rows,
transform_w = lambda w: w,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext(word2topicvector_file)[0] + mmfile_presuffix + '.mm'
if not reload:
# # legacy condition ( for files with file extension inside filename )
# if not os.path.exists(mm_file):
# mm_file = word2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(word2topicvector_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic vector feature matrix '{}'".format(word2topicvector_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicvectorfile(word2topicvector_file)
for w, t in w2t:
w = transform_w(w)
if not w in row_indices:
continue
t = np.array(t.split(' '), dtype=np.float)
ks = row_indices[w]
if mat.shape[1] < len(t):
mat.resize((mat.shape[0],len(t)))
for k in ks:
mat[k,:] = t
logging.info("finished creating topic feature matrix '{}'".format(word2topicvector_file))
logging.info("saving matrix to '{}'.".format(word2topicvector_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
|
src/text_entail/matrix.py
|
from __future__ import print_function
import logging
import sys
import os
import cPickle
import numpy as np
from scipy.sparse import dok_matrix
from scipy.io import mmwrite, mmread
import text_entail.dictionary as td
import text_entail.io as tio
def w1Asfeature(d_triples, d_w1):
"""
"""
w1_mat = dok_matrix((len(d_triples), len(d_triples._m2ids)))
for w1, ids in d_triples._m2ids.items():
j = d_w1.add(w1)
for i in ids:
w1_mat[i,j] = 1
return w1_mat
def w2Asfeature(d_triples, d_w2):
"""
"""
w2_mat = dok_matrix((len(d_triples), len(d_triples._r2ids)))
for w2, ids in d_triples._r2ids.items():
j = d_w2.add(w2)
for i in ids:
w2_mat[i,j] = 1
return w2_mat
def ctxAsfeature(d_triples, d_ctx):
"""
"""
ctx_mat = dok_matrix((len(d_triples), len(d_triples._l2ids)))
for ctx, ids in d_triples._l2ids.items():
j = d_ctx.add(ctx)
for i in ids:
ctx_mat[i,j] = 1
return ctx_mat
def binarize_sparse_matrix(mat):
"""
"""
logging.info('binarizing feature matrix')
mat = mat.astype(bool)
mat = mat.astype(np.float64)
logging.info('finished binarizing feature matrix')
return mat
def pred_vectors_with_context(preds_file, has_header=True):
"""
"""
logging.info("creating predicate pairs class vector '{}'".format(preds_file))
temp = []
xy_predl_predr_entail = tio.read_preds_w_ctx(preds_file, has_header=has_header)
d_triples = td.TripleDict() # rows
duplicates = 0
contradicting_duplicates = 0
for ctx_X, ctx_Y, pred_l, pred_r, entailing in xy_predl_predr_entail:
ctx = '{}\t{}'.format(ctx_X, ctx_Y)
i = d_triples.add((ctx, pred_l, pred_r))
if i < len(temp):
label = 1 if entailing.strip().lower() == 'true' else 0
print("omitting duplicate example: '{} {} {} {}' ".format(ctx, pred_l, pred_r, entailing) ,file=sys.stderr)
duplicates += 1
if temp[i] != label:
print("duplicate example has different label: '{}' vs. '{}'".format(temp[i], label) ,file=sys.stderr)
contradicting_duplicates += 1
else:
temp.append(1 if entailing.strip().lower() == 'true' else 0)
vec = np.array(temp, dtype=np.float64)
logging.info("finished creating arg pairs class vector '{}'".format(preds_file))
logging.info("found {} duplicate examples with {} having contradicting labels.".format(duplicates, contradicting_duplicates))
return vec, d_triples
def arg_l_arg_r_pairs_vector(args_file, file_contains_context=False, has_header=True):
"""
"""
logging.info("creating arg pairs class vector '{}'".format(args_file))
temp = []
if file_contains_context:
ctx_argl_argr_entail = tio.read_args_w_ctx(args_file, has_header=has_header)
else:
argl_argr_entail = tio.read_args_wo_ctx(args_file, has_header=has_header)
def append_empty_context(tuples):
for l,r,e in tuples:
yield '', l, r, e
ctx_argl_argr_entail = append_empty_context(argl_argr_entail)
d_triples = td.TripleDict() # rows
duplicates = 0
contradicting_duplicates = 0
for ctx, arg_l, arg_r, entailing in ctx_argl_argr_entail:
i = d_triples.add((ctx, arg_l, arg_r))
if i < len(temp):
label = 1 if entailing.strip().lower() == 'true' else 0
print("omitting duplicate example: '{} {} {} {}' ".format(ctx, arg_l, arg_r, entailing) ,file=sys.stderr)
duplicates += 1
if temp[i] != label:
print("duplicate example has different label: '{}' vs. '{}'".format(temp[i], label) ,file=sys.stderr)
contradicting_duplicates += 1
else:
temp.append(1 if entailing.strip().lower() == 'true' else 0)
vec = np.array(temp, dtype=np.float64)
logging.info("finished creating arg pairs class vector '{}'".format(args_file))
logging.info("found {} duplicate examples with {} having contradicting labels.".format(duplicates, contradicting_duplicates))
return vec, d_triples
def arg_l_arg_r_asjo_matrix(
row_indices,
jb_file,
num_rows,
col_indices,
transform_w1 = lambda w1 : (w1[:w1.find('::@')], w1[w1.find('@::')+3:]),
transform_w2sig = lambda w2sig : w2sig,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( jb_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = jb_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(jb_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
with open(mm_file+'i','r') as f:
col_indices._id2w = cPickle.load(f)
for i, w in enumerate(col_indices._id2w):
col_indices._w2id[w] = i
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating arg pair feature matrix '{}'".format(jb_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # len(d_pairs) = number of rows
j_bs = tio.read_jb_file_filter_by_jo(jb_file, lambda jo : transform_w1(jo) in row_indices)
for j, bs in j_bs:
ks = row_indices[transform_w1(j)]
for b, s in transform_w2sig(bs):
l = col_indices.add(b)
if mat.shape[1] <= l:
mat.resize((mat.shape[0],l+1))
for k in ks:
mat[k,l] = float(s)
logging.info("finished creating arg pair feature matrix '{}'".format(jb_file))
logging.info("saving matrix to '{}'.".format(mm_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
with open(mm_file+'i','w') as f:
cPickle.dump(col_indices._id2w, f)
logging.info("finshed saving matrix")
return mat
def arg_asjo_matrix(
row_indices,
col_indices,
jb_file,
num_rows,
transform_w1 = lambda w1 : w1,
transform_w2sig = lambda w2sig : w2sig,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( jb_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = jb_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(jb_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
with open(mm_file+'i','r') as f:
col_indices._id2w = cPickle.load(f)
for i, w in enumerate(col_indices._id2w):
col_indices._w2id[w] = i
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating arg feature matrix '{}'".format(jb_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
j_bs = tio.read_jb_file_filter_by_jo(jb_file, lambda jo : transform_w1(jo) in row_indices)
for j, bs in j_bs:
j = transform_w1(j)
ks = row_indices[j]
for b, s in transform_w2sig(bs):
l = col_indices.add(b)
if mat.shape[1] <= l:
mat.resize((mat.shape[0],l+1))
for k in ks:
mat[k,l] = float(s)
logging.info("finished creating arg feature matrix '{}'".format(jb_file))
logging.info("saving matrix to '{}'.".format(mm_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
with open(mm_file+'i','w') as f:
cPickle.dump(col_indices._id2w, f)
logging.info("finshed saving matrix")
return mat
def arg_to_topic_matrix(
args,
word2topic_file,
num_rows,
transform_w = lambda w: w,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( word2topic_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = word2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(word2topic_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic feature matrix '{}'".format(word2topic_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicfile(word2topic_file)
for w, t in w2t:
w = transform_w(w)
if not w in args:
continue
ks = args[w]
if mat.shape[1] <= t:
mat.resize((mat.shape[0],t+1))
for k in ks:
mat[k,t] = 1
logging.info("finished creating topic feature matrix '{}'".format(word2topic_file))
logging.info("saving matrix to '{}'.".format(word2topic_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
def arg_l_arg_r_to_topic_matrix(
row_indices,
pair2topic_file,
num_rows,
transform_w = lambda w1 : (w1[:w1.find('::@')], w1[w1.find('@::')+3:]),
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext( pair2topic_file )[0] + mmfile_presuffix + '.mm'
if not reload:
# legacy condition ( for files with file extension inside filename )
if not os.path.exists(mm_file):
mm_file = pair2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(pair2topic_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic feature matrix '{}'".format(pair2topic_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicfile(pair2topic_file)
for w, t in w2t:
p = transform_w(w)
if p not in row_indices:
continue
ks = row_indices[p]
if mat.shape[1] <= t:
mat.resize((mat.shape[0],t+1))
for k in ks:
mat[k,t] = 1
logging.info("finished creating topic feature matrix '{}'".format(pair2topic_file))
logging.info("saving matrix to '{}'.".format(pair2topic_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
def topic_vector_matrix(
row_indices,
word2topicvector_file,
num_rows,
transform_w = lambda w: w,
mmfile_presuffix = '',
reload = False):
"""
"""
mm_file = os.path.splitext(word2topicvector_file)[0] + mmfile_presuffix + '.mm'
if not reload:
# # legacy condition ( for files with file extension inside filename )
# if not os.path.exists(mm_file):
# mm_file = word2topic_file + mmfile_presuffix + '.mm'
if os.path.exists(mm_file) and os.path.isfile(mm_file):
logging.info("corresponding matrix file already exists for '{}'.".format(word2topicvector_file))
logging.info("loading '{}'.".format(mm_file))
mat = mmread(mm_file)
logging.info("finished loading '{}'.".format(mm_file))
return mat
logging.info("creating topic vector feature matrix '{}'".format(word2topicvector_file))
mat = dok_matrix((num_rows,1),dtype=np.float64) # number of rows x 1
w2t = tio.read_word2topicvectorfile(word2topicvector_file)
for w, t in w2t:
w = transform_w(w)
if not w in row_indices:
continue
t = np.array(t.split(' '), dtype=np.float)
ks = row_indices[w]
if mat.shape[1] < len(t):
mat.resize((mat.shape[0],len(t)))
for k in ks:
mat[k,:] = t
logging.info("finished creating topic feature matrix '{}'".format(word2topicvector_file))
logging.info("saving matrix to '{}'.".format(word2topicvector_file))
with open(mm_file,'w') as f:
mmwrite(f, mat)
logging.info("finished saving matrix")
return mat
| 0.340485 | 0.217961 |
import abc
from typing import Any
import torch
AGGREGATION_MODES = ["mean", "max", "min"]
class Metric(metaclass=abc.ABCMeta):
"""abstract class for Metric objects.
Example:
Simple usage of the Metric class::
class MyMetric(Metric):
def _update(self, predictions, truth):
# compute some metric
return metric_value
model = MyModel()
mymetric = MyMetric()
for batch, labels in dataset:
predictions = model(batch)
mymetric.update(predictions, labels)
print(mymetric.get_metric(mode="mean"))
"""
def __init__(self) -> None:
self.reset()
def reset(self) -> None:
"""Clear metrics from class."""
self.metrics = []
def update(self, predictions: torch.Tensor, truth: torch.Tensor) -> None:
"""Compute metric value and append to the metrics array.
Args:
predictions (torch.Tensor): output tensors from model.
truth (torch.Tensor): ground truth tensor.
"""
self.metrics.append(self._update(predictions, truth))
@abc.abstractmethod
def _update(self, predictions: torch.Tensor, truth: torch.Tensor) -> Any:
"""Compute the metric value.
Args:
predictions (torch.Tensor): output tensors from model.
truth (torch.Tensor): ground truth tensor.
"""
def __len__(self) -> int:
return len(self.metrics)
def get_metric(self, mode="mean") -> float:
"""Aggregate all values stored in the metric class.
Args:
mode (str, optional): aggregation type. mean, max or min.
Defaults to "mean".
Raises:
ValueError: aggregation mode not supported
Returns:
float: aggregated metric.
"""
if len(self) == 0:
return 0.0
if mode not in AGGREGATION_MODES:
raise ValueError(
f"Mode {mode} not supported. Supported modes: {AGGREGATION_MODES}"
)
if mode == "mean":
return sum(self.metrics) / len(self)
elif mode == "max":
return max(self.metrics)
elif mode == "min":
return min(self.metrics)
|
frarch/modules/metrics/base.py
|
import abc
from typing import Any
import torch
AGGREGATION_MODES = ["mean", "max", "min"]
class Metric(metaclass=abc.ABCMeta):
"""abstract class for Metric objects.
Example:
Simple usage of the Metric class::
class MyMetric(Metric):
def _update(self, predictions, truth):
# compute some metric
return metric_value
model = MyModel()
mymetric = MyMetric()
for batch, labels in dataset:
predictions = model(batch)
mymetric.update(predictions, labels)
print(mymetric.get_metric(mode="mean"))
"""
def __init__(self) -> None:
self.reset()
def reset(self) -> None:
"""Clear metrics from class."""
self.metrics = []
def update(self, predictions: torch.Tensor, truth: torch.Tensor) -> None:
"""Compute metric value and append to the metrics array.
Args:
predictions (torch.Tensor): output tensors from model.
truth (torch.Tensor): ground truth tensor.
"""
self.metrics.append(self._update(predictions, truth))
@abc.abstractmethod
def _update(self, predictions: torch.Tensor, truth: torch.Tensor) -> Any:
"""Compute the metric value.
Args:
predictions (torch.Tensor): output tensors from model.
truth (torch.Tensor): ground truth tensor.
"""
def __len__(self) -> int:
return len(self.metrics)
def get_metric(self, mode="mean") -> float:
"""Aggregate all values stored in the metric class.
Args:
mode (str, optional): aggregation type. mean, max or min.
Defaults to "mean".
Raises:
ValueError: aggregation mode not supported
Returns:
float: aggregated metric.
"""
if len(self) == 0:
return 0.0
if mode not in AGGREGATION_MODES:
raise ValueError(
f"Mode {mode} not supported. Supported modes: {AGGREGATION_MODES}"
)
if mode == "mean":
return sum(self.metrics) / len(self)
elif mode == "max":
return max(self.metrics)
elif mode == "min":
return min(self.metrics)
| 0.946014 | 0.363195 |
from pprint import pprint
import sympy as sym
sym.init_printing(use_latex=True)
import numpy as np
from .benchmark import Benchmark
class Schubert(Benchmark):
def __init__(self, case: str):
super().__init__()
if case not in {'p3', 'p8', 'p16', 'p22'}:
raise ValueError('case must be one of p3, p8, p16, or p22')
self.name = f"schubert {case}"
def u(x_i, a, k, m):
return sym.Piecewise(
(k * (x_i - a)**m, sym.Gt(x_i, a)),
                (0, sym.And(sym.Ge(x_i, -a), sym.Le(x_i, a))),
(k * (-x_i - a)**m, sym.Lt(x_i, -a))
)
a, k, m = sym.symbols('a k m')
if case == 'p3':
n = 2
x = sym.IndexedBase('x')
self.x = [x[i] for i in range(0, n)]
i = sym.Idx('i')
term1 = sym.Sum(i * sym.cos((i + 1) * x[0] + 1), (i, 0, 4))
term2 = sym.Sum(i * sym.cos((i + 1) * x[1] + 1), (i, 0, 4))
self.expr = term1 * term2 + u(x[0], a, k, m) + u(x[1], a, k, m)
self.params = {'a': [a, 10.],
'k': [k, 100.],
'm': [m, 2]}
self.xmin = None
self.domain = [-10. * np.ones(n), 10. * np.ones(n)]
self.domain_plot = self.domain
elif case == 'p8':
n = 3
x = sym.IndexedBase('x')
self.x = [x[i] for i in range(0, n)]
y = sym.IndexedBase('y')
i = sym.Idx('i')
k_1, k_2 = sym.symbols('k_1 k_2')
pprint(y)
self.expr = (sym.pi / n) * (
k_1 * sym.sin(sym.pi * y[0])**2
+ sym.Sum((y[i] - k_2)**2
* (1. + k_1 * sym.sin(sym.pi * y[i + 1])**2), (i, 0, n - 2))
+ (y[n - 1] - k_2)**2) \
+ sym.Sum(u(x[i], a, k, m), (i, 0, n - 1))
y_subs = {y[i]: 1. + 0.25 * (x[i] + 1.) for i in range(n)}
self.expr = self.expr.doit().subs(y_subs)
self.params = {'a': [a, 10.],
'k': [k, 100.],
'm': [m, 4],
'k_1': [k_1, 10.],
'k_2': [k_2, 1.]}
self.xmin = [[1., 1., 1.], ]
self.domain = [-10. * np.ones(n), 10. * np.ones(n)]
self.domain_plot = None
self.dims = n
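# Usage sketch (hedged; the Benchmark base class lives elsewhere, so only the
# attributes set above are assumed). For the 'p3' case the stored expression
# can be reduced to a plain SymPy function of x[0], x[1] by evaluating the
# sums and substituting the fixed parameters:
#
#     b = Schubert('p3')
#     subs = {sym_: val for sym_, val in b.params.values()}  # a, k, m
#     expr = b.expr.doit().subs(subs)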
|
zoo/schubert.py
|
from pprint import pprint
import sympy as sym
sym.init_printing(use_latex=True)
import numpy as np
from .benchmark import Benchmark
class Schubert(Benchmark):
def __init__(self, case: str):
super().__init__()
if case not in {'p3', 'p8', 'p16', 'p22'}:
raise ValueError('case must be one of p3, p8, p16, or p22')
self.name = f"schubert {case}"
def u(x_i, a, k, m):
return sym.Piecewise(
(k * (x_i - a)**m, sym.Gt(x_i, a)),
                (0, sym.And(sym.Ge(x_i, -a), sym.Le(x_i, a))),
(k * (-x_i - a)**m, sym.Lt(x_i, -a))
)
a, k, m = sym.symbols('a k m')
if case == 'p3':
n = 2
x = sym.IndexedBase('x')
self.x = [x[i] for i in range(0, n)]
i = sym.Idx('i')
term1 = sym.Sum(i * sym.cos((i + 1) * x[0] + 1), (i, 0, 4))
term2 = sym.Sum(i * sym.cos((i + 1) * x[1] + 1), (i, 0, 4))
self.expr = term1 * term2 + u(x[0], a, k, m) + u(x[1], a, k, m)
self.params = {'a': [a, 10.],
'k': [k, 100.],
'm': [m, 2]}
self.xmin = None
self.domain = [-10. * np.ones(n), 10. * np.ones(n)]
self.domain_plot = self.domain
elif case == 'p8':
n = 3
x = sym.IndexedBase('x')
self.x = [x[i] for i in range(0, n)]
y = sym.IndexedBase('y')
i = sym.Idx('i')
k_1, k_2 = sym.symbols('k_1 k_2')
pprint(y)
self.expr = (sym.pi / n) * (
k_1 * sym.sin(sym.pi * y[0])**2
+ sym.Sum((y[i] - k_2)**2
* (1. + k_1 * sym.sin(sym.pi * y[i + 1])**2), (i, 0, n - 2))
+ (y[n - 1] - k_2)**2) \
+ sym.Sum(u(x[i], a, k, m), (i, 0, n - 1))
y_subs = {y[i]: 1. + 0.25 * (x[i] + 1.) for i in range(n)}
self.expr = self.expr.doit().subs(y_subs)
self.params = {'a': [a, 10.],
'k': [k, 100.],
'm': [m, 4],
'k_1': [k_1, 10.],
'k_2': [k_2, 1.]}
self.xmin = [[1., 1., 1.], ]
self.domain = [-10. * np.ones(n), 10. * np.ones(n)]
self.domain_plot = None
self.dims = n
| 0.325092 | 0.329001 |
import logging
import os
import platform
import shutil
import sys
import unittest
import uuid
from copy import copy
import psutil
from psutil import AccessDenied, NoSuchProcess
from pyngrok.conf import PyngrokConfig
from pyngrok import ngrok, installer, conf
from pyngrok import process
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, <NAME>"
__version__ = "5.1.0"
logger = logging.getLogger(__name__)
ngrok_logger = logging.getLogger("{}.ngrok".format(__name__))
class NgrokTestCase(unittest.TestCase):
def setUp(self):
self.config_dir = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), ".ngrok2"))
if not os.path.exists(self.config_dir):
os.makedirs(self.config_dir)
config_path = os.path.join(self.config_dir, "config.yml")
conf.DEFAULT_NGROK_CONFIG_PATH = config_path
self.pyngrok_config = PyngrokConfig(config_path=conf.DEFAULT_NGROK_CONFIG_PATH)
conf.set_default(self.pyngrok_config)
        # ngrok's CDN can be flaky, so make sure its flakiness isn't reflected in our CI/CD test runs
installer.DEFAULT_RETRY_COUNT = 3
def tearDown(self):
for p in list(process._current_processes.values()):
try:
process.kill_process(p.pyngrok_config.ngrok_path)
p.proc.wait()
except OSError:
pass
ngrok._current_tunnels.clear()
if os.path.exists(self.config_dir):
shutil.rmtree(self.config_dir)
@staticmethod
def given_ngrok_installed(pyngrok_config):
ngrok.install_ngrok(pyngrok_config)
@staticmethod
def given_ngrok_not_installed(ngrok_path):
if os.path.exists(ngrok_path):
os.remove(ngrok_path)
@staticmethod
def create_unique_subdomain():
return "pyngrok-{}-{}-{}-{}{}-tcp".format(uuid.uuid4(), platform.system(),
platform.python_implementation(), sys.version_info[0],
sys.version_info[1]).lower()
@staticmethod
def copy_with_updates(to_copy, **kwargs):
copied = copy(to_copy)
for key, value in kwargs.items():
copied.__setattr__(key, value)
return copied
def assertNoZombies(self):
try:
self.assertEqual(0, len(
list(filter(lambda p: p.name() == "ngrok" and p.status() == "zombie", psutil.process_iter()))))
except (AccessDenied, NoSuchProcess):
# Some OSes are flaky on this assertion, but that isn't an indication anything is wrong, so pass
pass
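# Usage sketch (assumed test-module layout, not part of this file): concrete
# test cases subclass NgrokTestCase to inherit the temporary ngrok config
# directory, teardown and zombie-process assertion, e.g.:
#
#     class TestNgrok(NgrokTestCase):
#         def test_connect(self):
#             self.given_ngrok_installed(self.pyngrok_config)
#             tunnel = ngrok.connect(5000, pyngrok_config=self.pyngrok_config)
#             self.assertIsNotNone(tunnel.public_url)
#             self.assertNoZombies()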
|
tests/testcase.py
|
import logging
import os
import platform
import shutil
import sys
import unittest
import uuid
from copy import copy
import psutil
from psutil import AccessDenied, NoSuchProcess
from pyngrok.conf import PyngrokConfig
from pyngrok import ngrok, installer, conf
from pyngrok import process
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, <NAME>"
__version__ = "5.1.0"
logger = logging.getLogger(__name__)
ngrok_logger = logging.getLogger("{}.ngrok".format(__name__))
class NgrokTestCase(unittest.TestCase):
def setUp(self):
self.config_dir = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), ".ngrok2"))
if not os.path.exists(self.config_dir):
os.makedirs(self.config_dir)
config_path = os.path.join(self.config_dir, "config.yml")
conf.DEFAULT_NGROK_CONFIG_PATH = config_path
self.pyngrok_config = PyngrokConfig(config_path=conf.DEFAULT_NGROK_CONFIG_PATH)
conf.set_default(self.pyngrok_config)
        # ngrok's CDN can be flaky, so make sure its flakiness isn't reflected in our CI/CD test runs
installer.DEFAULT_RETRY_COUNT = 3
def tearDown(self):
for p in list(process._current_processes.values()):
try:
process.kill_process(p.pyngrok_config.ngrok_path)
p.proc.wait()
except OSError:
pass
ngrok._current_tunnels.clear()
if os.path.exists(self.config_dir):
shutil.rmtree(self.config_dir)
@staticmethod
def given_ngrok_installed(pyngrok_config):
ngrok.install_ngrok(pyngrok_config)
@staticmethod
def given_ngrok_not_installed(ngrok_path):
if os.path.exists(ngrok_path):
os.remove(ngrok_path)
@staticmethod
def create_unique_subdomain():
return "pyngrok-{}-{}-{}-{}{}-tcp".format(uuid.uuid4(), platform.system(),
platform.python_implementation(), sys.version_info[0],
sys.version_info[1]).lower()
@staticmethod
def copy_with_updates(to_copy, **kwargs):
copied = copy(to_copy)
for key, value in kwargs.items():
copied.__setattr__(key, value)
return copied
def assertNoZombies(self):
try:
self.assertEqual(0, len(
list(filter(lambda p: p.name() == "ngrok" and p.status() == "zombie", psutil.process_iter()))))
except (AccessDenied, NoSuchProcess):
# Some OSes are flaky on this assertion, but that isn't an indication anything is wrong, so pass
pass
| 0.247532 | 0.058025 |
import os
import tempfile
import unittest
import pytorch_lightning as pl
import torch.utils.data
from hydra import compose, initialize_config_dir
from nuplan.planning.script.builders.model_builder import build_nn_model
from nuplan.planning.script.builders.scenario_building_builder import build_scenario_builder
from nuplan.planning.script.builders.training_builder import build_lightning_datamodule
from nuplan.planning.script.builders.utils.utils_config import update_config_for_training
from nuplan.planning.script.builders.worker_pool_builder import build_worker
from omegaconf import DictConfig, OmegaConf
CONFIG_NAME = 'default_training'
class TestDataLoader(unittest.TestCase):
"""
Tests data loading functionality
"""
def setUp(self) -> None:
""" Setup hydra config. """
seed = 10
pl.seed_everything(seed, workers=True)
main_path = os.path.dirname(os.path.realpath(__file__))
self.config_path = os.path.join(main_path, '../config/training/')
# Todo: Investigate pkg in hydra
# Since we are not using the default config in this test, we need to specify the Hydra search path in the
# compose API override, otherwise the Jenkins build fails because bazel cannot find the simulation config file.
common_dir = "file://" + os.path.join(main_path, '..', 'config', 'common')
experiment_dir = "file://" + os.path.join(main_path, '..', 'experiments')
self.search_path = f'hydra.searchpath=[{common_dir}, {experiment_dir}]'
self.group = tempfile.TemporaryDirectory()
self.cache_dir = os.path.join(self.group.name, 'cache_dir')
def tearDown(self) -> None:
""" Remove temporary folder. """
self.group.cleanup()
@staticmethod
def validate_cfg(cfg: DictConfig) -> None:
""" validate hydra config. """
update_config_for_training(cfg)
OmegaConf.set_struct(cfg, False)
cfg.scenario_filter.max_scenarios_per_log = 1
cfg.data_loader.datamodule.train_fraction = 1.0
cfg.data_loader.datamodule.val_fraction = 1.0
cfg.data_loader.datamodule.test_fraction = 1.0
cfg.data_loader.params.batch_size = 2
cfg.data_loader.params.num_workers = 2
cfg.data_loader.params.pin_memory = False
OmegaConf.set_struct(cfg, True)
@staticmethod
def _iterate_dataloader(dataloader: torch.utils.data.DataLoader) -> None:
"""
Iterate NUM_BATCHES of the dataloader
:param dataloader: Data loader.
"""
dataloader_iter = iter(dataloader)
num_batches = 5
iterations = min(len(dataloader), num_batches)
for _ in range(iterations):
next(dataloader_iter)
def _run_dataloader(self, cfg: DictConfig) -> None:
"""
Tests that the training dataloader can be iterated without errors.
:param cfg: Hydra config.
"""
worker = build_worker(cfg)
scenario_builder = build_scenario_builder(cfg)
planning_module = build_nn_model(cfg.model)
datamodule = build_lightning_datamodule(cfg, scenario_builder, worker, planning_module)
datamodule.setup('fit')
datamodule.setup('test')
train_dataloader = datamodule.train_dataloader()
val_dataloader = datamodule.val_dataloader()
test_dataloader = datamodule.test_dataloader()
for dataloader in [train_dataloader, val_dataloader]:
assert len(dataloader) > 0
self._iterate_dataloader(dataloader)
self._iterate_dataloader(test_dataloader)
def test_dataloader(self) -> None:
""" Test dataloader on nuPlan DB. """
log_names = ['2021.05.26.20.05.14_38_1622073985538950.8_1622074969538793.5', # train
'2021.07.21.02.32.00_26_1626834838399916.8_1626835894396760.2', # train
'2021.06.04.19.10.47_47_1622848319071793.5_1622849413071686.2', # val
'2021.05.28.21.56.29_24_1622239057169313.0_1622240664170207.2'] # test
overrides = [
"scenario_builder=nuplan_mini",
"splitter=nuplan",
"scenario_builder.nuplan.scenario_filter.log_labels=null",
f"scenario_builder.nuplan.scenario_filter.log_names={log_names}",
f"group={self.group.name}",
f"cache_dir={self.cache_dir}",
]
with initialize_config_dir(config_dir=self.config_path):
cfg = compose(config_name=CONFIG_NAME,
overrides=[self.search_path, *overrides, '+training=training_raster_model'])
self.validate_cfg(cfg)
self._run_dataloader(cfg)
if __name__ == '__main__':
unittest.main()
|
nuplan/planning/script/test/test_config_dataloader.py
|
import os
import tempfile
import unittest
import pytorch_lightning as pl
import torch.utils.data
from hydra import compose, initialize_config_dir
from nuplan.planning.script.builders.model_builder import build_nn_model
from nuplan.planning.script.builders.scenario_building_builder import build_scenario_builder
from nuplan.planning.script.builders.training_builder import build_lightning_datamodule
from nuplan.planning.script.builders.utils.utils_config import update_config_for_training
from nuplan.planning.script.builders.worker_pool_builder import build_worker
from omegaconf import DictConfig, OmegaConf
CONFIG_NAME = 'default_training'
class TestDataLoader(unittest.TestCase):
"""
Tests data loading functionality
"""
def setUp(self) -> None:
""" Setup hydra config. """
seed = 10
pl.seed_everything(seed, workers=True)
main_path = os.path.dirname(os.path.realpath(__file__))
self.config_path = os.path.join(main_path, '../config/training/')
# Todo: Investigate pkg in hydra
# Since we are not using the default config in this test, we need to specify the Hydra search path in the
# compose API override, otherwise the Jenkins build fails because bazel cannot find the simulation config file.
common_dir = "file://" + os.path.join(main_path, '..', 'config', 'common')
experiment_dir = "file://" + os.path.join(main_path, '..', 'experiments')
self.search_path = f'hydra.searchpath=[{common_dir}, {experiment_dir}]'
self.group = tempfile.TemporaryDirectory()
self.cache_dir = os.path.join(self.group.name, 'cache_dir')
def tearDown(self) -> None:
""" Remove temporary folder. """
self.group.cleanup()
@staticmethod
def validate_cfg(cfg: DictConfig) -> None:
""" validate hydra config. """
update_config_for_training(cfg)
OmegaConf.set_struct(cfg, False)
cfg.scenario_filter.max_scenarios_per_log = 1
cfg.data_loader.datamodule.train_fraction = 1.0
cfg.data_loader.datamodule.val_fraction = 1.0
cfg.data_loader.datamodule.test_fraction = 1.0
cfg.data_loader.params.batch_size = 2
cfg.data_loader.params.num_workers = 2
cfg.data_loader.params.pin_memory = False
OmegaConf.set_struct(cfg, True)
@staticmethod
def _iterate_dataloader(dataloader: torch.utils.data.DataLoader) -> None:
"""
Iterate NUM_BATCHES of the dataloader
:param dataloader: Data loader.
"""
dataloader_iter = iter(dataloader)
num_batches = 5
iterations = min(len(dataloader), num_batches)
for _ in range(iterations):
next(dataloader_iter)
def _run_dataloader(self, cfg: DictConfig) -> None:
"""
Tests that the training dataloader can be iterated without errors.
:param cfg: Hydra config.
"""
worker = build_worker(cfg)
scenario_builder = build_scenario_builder(cfg)
planning_module = build_nn_model(cfg.model)
datamodule = build_lightning_datamodule(cfg, scenario_builder, worker, planning_module)
datamodule.setup('fit')
datamodule.setup('test')
train_dataloader = datamodule.train_dataloader()
val_dataloader = datamodule.val_dataloader()
test_dataloader = datamodule.test_dataloader()
for dataloader in [train_dataloader, val_dataloader]:
assert len(dataloader) > 0
self._iterate_dataloader(dataloader)
self._iterate_dataloader(test_dataloader)
def test_dataloader(self) -> None:
""" Test dataloader on nuPlan DB. """
log_names = ['2021.05.26.20.05.14_38_1622073985538950.8_1622074969538793.5', # train
'2021.07.21.02.32.00_26_1626834838399916.8_1626835894396760.2', # train
'2021.06.04.19.10.47_47_1622848319071793.5_1622849413071686.2', # val
'2021.05.28.21.56.29_24_1622239057169313.0_1622240664170207.2'] # test
overrides = [
"scenario_builder=nuplan_mini",
"splitter=nuplan",
"scenario_builder.nuplan.scenario_filter.log_labels=null",
f"scenario_builder.nuplan.scenario_filter.log_names={log_names}",
f"group={self.group.name}",
f"cache_dir={self.cache_dir}",
]
with initialize_config_dir(config_dir=self.config_path):
cfg = compose(config_name=CONFIG_NAME,
overrides=[self.search_path, *overrides, '+training=training_raster_model'])
self.validate_cfg(cfg)
self._run_dataloader(cfg)
if __name__ == '__main__':
unittest.main()
| 0.471467 | 0.301748 |
import numpy as np
from qutip import (
rand_ket, rand_dm, rand_herm, rand_unitary, rand_ket_haar, rand_dm_hs,
rand_super, rand_unitary_haar, rand_dm_ginibre, rand_super_bcsz, qeye,
rand_stochastic,
)
import pytest
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_unitary, rand_unitary_haar])
def test_rand_unitary(func):
"""
Random Qobjs: Tests that unitaries are actually unitary.
"""
random_qobj = func(5)
I = qeye(5)
assert random_qobj * random_qobj.dag() == I
@pytest.mark.repeat(5)
@pytest.mark.parametrize('density', [0.2, 0.8], ids=["sparse", "dense"])
@pytest.mark.parametrize('pos_def', [True, False])
def test_rand_herm(density, pos_def):
"""
Random Qobjs: Hermitian matrix
"""
random_qobj = rand_herm(5, density=density, pos_def=pos_def)
if pos_def:
        assert all(random_qobj.eigenenergies() > -1e-14)
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_herm_Eigs():
"""
Random Qobjs: Hermitian matrix - Eigs given
"""
eigs = np.random.random(5)
eigs /= np.sum(eigs)
eigs.sort()
random_qobj = rand_herm(eigs)
np.testing.assert_allclose(random_qobj.eigenenergies(), eigs)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_dm, rand_dm_hs])
def test_rand_dm(func):
"""
Random Qobjs: Density matrix
"""
random_qobj = func(5)
assert abs(random_qobj.tr() - 1.0) < 1e-14
# verify all eigvals are >=0
assert all(random_qobj.eigenenergies() >= -1e-14)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_dm_Eigs():
"""
Random Qobjs: Density matrix - Eigs given
"""
eigs = np.random.random(5)
eigs /= np.sum(eigs)
eigs.sort()
random_qobj = rand_dm(eigs)
assert abs(random_qobj.tr() - 1.0) < 1e-14
np.testing.assert_allclose(random_qobj.eigenenergies(), eigs)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_dm_ginibre_rank():
"""
Random Qobjs: Ginibre-random density ops have correct rank.
"""
random_qobj = rand_dm_ginibre(5, rank=3)
rank = sum([abs(E) >= 1e-10 for E in random_qobj.eigenenergies()])
assert rank == 3
@pytest.mark.repeat(5)
@pytest.mark.parametrize('kind', ["left", "right"])
def test_rand_stochastic(kind):
"""
Random Qobjs: Test random stochastic
"""
random_qobj = rand_stochastic(5, kind=kind)
axis = {"left":0, "right":1}[kind]
np.testing.assert_allclose(np.sum(random_qobj.full(), axis=axis), 1,
atol=1e-14)
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
def test_rand_ket(func):
"""
Random Qobjs: Test random ket type and norm.
"""
random_qobj = func(5)
assert random_qobj.type == 'ket'
assert abs(random_qobj.norm() - 1) < 1e-14
@pytest.mark.repeat(5)
def test_rand_super():
"""
Random Qobjs: Super operator.
"""
random_qobj = rand_super(5)
assert random_qobj.issuper
@pytest.mark.repeat(5)
def test_rand_super_bcsz_cptp():
"""
Random Qobjs: Tests that BCSZ-random superoperators are CPTP.
"""
random_qobj = rand_super_bcsz(5)
assert random_qobj.issuper
assert random_qobj.iscptp
@pytest.mark.parametrize('func', [
rand_unitary, rand_unitary_haar, rand_herm,
rand_dm, rand_dm_hs, rand_dm_ginibre,
rand_ket, rand_ket_haar,
rand_super, rand_super_bcsz
])
def test_random_seeds(func):
"""
Random Qobjs: Random number generator seed
"""
seed = 12345
U0 = func(5, seed=seed)
U1 = func(5, seed=None)
U2 = func(5, seed=seed)
assert U0 != U1
assert U0 == U2
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[6], [1]], id="N"),
pytest.param((), {'dims': [[2, 3], [1, 1]]}, [[2, 3], [1, 1]], id="dims"),
pytest.param((6,), {'dims': [[2, 3], [1, 1]]}, [[2, 3], [1, 1]],
id="both"),
])
def test_rand_vector_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
def test_rand_ket_raises_if_no_args(func):
with pytest.raises(ValueError):
func()
@pytest.mark.parametrize('func', [
rand_unitary, rand_herm, rand_dm, rand_unitary_haar, rand_dm_ginibre,
rand_dm_hs, rand_stochastic,
])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[6], [6]], id="N"),
pytest.param((6,), {'dims': [[2, 3], [2, 3]]}, [[2, 3], [2, 3]],
id="both"),
])
def test_rand_oper_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
_super_dims = [[[2, 3], [2, 3]], [[2, 3], [2, 3]]]
@pytest.mark.parametrize('func', [rand_super, rand_super_bcsz])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[[6]]*2]*2, id="N"),
pytest.param((6,), {'dims': _super_dims}, _super_dims,
id="both"),
])
def test_rand_super_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
|
qutip/tests/test_random.py
|
import numpy as np
from qutip import (
rand_ket, rand_dm, rand_herm, rand_unitary, rand_ket_haar, rand_dm_hs,
rand_super, rand_unitary_haar, rand_dm_ginibre, rand_super_bcsz, qeye,
rand_stochastic,
)
import pytest
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_unitary, rand_unitary_haar])
def test_rand_unitary(func):
"""
Random Qobjs: Tests that unitaries are actually unitary.
"""
random_qobj = func(5)
I = qeye(5)
assert random_qobj * random_qobj.dag() == I
@pytest.mark.repeat(5)
@pytest.mark.parametrize('density', [0.2, 0.8], ids=["sparse", "dense"])
@pytest.mark.parametrize('pos_def', [True, False])
def test_rand_herm(density, pos_def):
"""
Random Qobjs: Hermitian matrix
"""
random_qobj = rand_herm(5, density=density, pos_def=pos_def)
if pos_def:
        assert all(random_qobj.eigenenergies() > -1e-14)
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_herm_Eigs():
"""
Random Qobjs: Hermitian matrix - Eigs given
"""
eigs = np.random.random(5)
eigs /= np.sum(eigs)
eigs.sort()
random_qobj = rand_herm(eigs)
np.testing.assert_allclose(random_qobj.eigenenergies(), eigs)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_dm, rand_dm_hs])
def test_rand_dm(func):
"""
Random Qobjs: Density matrix
"""
random_qobj = func(5)
assert abs(random_qobj.tr() - 1.0) < 1e-14
# verify all eigvals are >=0
assert all(random_qobj.eigenenergies() >= -1e-14)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_dm_Eigs():
"""
Random Qobjs: Density matrix - Eigs given
"""
eigs = np.random.random(5)
eigs /= np.sum(eigs)
eigs.sort()
random_qobj = rand_dm(eigs)
assert abs(random_qobj.tr() - 1.0) < 1e-14
np.testing.assert_allclose(random_qobj.eigenenergies(), eigs)
# verify hermitian
assert random_qobj.isherm
@pytest.mark.repeat(5)
def test_rand_dm_ginibre_rank():
"""
Random Qobjs: Ginibre-random density ops have correct rank.
"""
random_qobj = rand_dm_ginibre(5, rank=3)
rank = sum([abs(E) >= 1e-10 for E in random_qobj.eigenenergies()])
assert rank == 3
@pytest.mark.repeat(5)
@pytest.mark.parametrize('kind', ["left", "right"])
def test_rand_stochastic(kind):
"""
Random Qobjs: Test random stochastic
"""
random_qobj = rand_stochastic(5, kind=kind)
axis = {"left":0, "right":1}[kind]
np.testing.assert_allclose(np.sum(random_qobj.full(), axis=axis), 1,
atol=1e-14)
@pytest.mark.repeat(5)
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
def test_rand_ket(func):
"""
Random Qobjs: Test random ket type and norm.
"""
random_qobj = func(5)
assert random_qobj.type == 'ket'
assert abs(random_qobj.norm() - 1) < 1e-14
@pytest.mark.repeat(5)
def test_rand_super():
"""
Random Qobjs: Super operator.
"""
random_qobj = rand_super(5)
assert random_qobj.issuper
@pytest.mark.repeat(5)
def test_rand_super_bcsz_cptp():
"""
Random Qobjs: Tests that BCSZ-random superoperators are CPTP.
"""
random_qobj = rand_super_bcsz(5)
assert random_qobj.issuper
assert random_qobj.iscptp
@pytest.mark.parametrize('func', [
rand_unitary, rand_unitary_haar, rand_herm,
rand_dm, rand_dm_hs, rand_dm_ginibre,
rand_ket, rand_ket_haar,
rand_super, rand_super_bcsz
])
def test_random_seeds(func):
"""
Random Qobjs: Random number generator seed
"""
seed = 12345
U0 = func(5, seed=seed)
U1 = func(5, seed=None)
U2 = func(5, seed=seed)
assert U0 != U1
assert U0 == U2
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[6], [1]], id="N"),
pytest.param((), {'dims': [[2, 3], [1, 1]]}, [[2, 3], [1, 1]], id="dims"),
pytest.param((6,), {'dims': [[2, 3], [1, 1]]}, [[2, 3], [1, 1]],
id="both"),
])
def test_rand_vector_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
@pytest.mark.parametrize('func', [rand_ket, rand_ket_haar])
def test_rand_ket_raises_if_no_args(func):
with pytest.raises(ValueError):
func()
@pytest.mark.parametrize('func', [
rand_unitary, rand_herm, rand_dm, rand_unitary_haar, rand_dm_ginibre,
rand_dm_hs, rand_stochastic,
])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[6], [6]], id="N"),
pytest.param((6,), {'dims': [[2, 3], [2, 3]]}, [[2, 3], [2, 3]],
id="both"),
])
def test_rand_oper_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
_super_dims = [[[2, 3], [2, 3]], [[2, 3], [2, 3]]]
@pytest.mark.parametrize('func', [rand_super, rand_super_bcsz])
@pytest.mark.parametrize(('args', 'kwargs', 'dims'), [
pytest.param((6,), {}, [[[6]]*2]*2, id="N"),
pytest.param((6,), {'dims': _super_dims}, _super_dims,
id="both"),
])
def test_rand_super_dims(func, args, kwargs, dims):
shape = np.prod(dims[0]), np.prod(dims[1])
output = func(*args, **kwargs)
assert output.shape == shape
assert output.dims == dims
| 0.627038 | 0.617657 |
import hmac
import hashlib
import sys
from ..errors import SignatureVerificationError
class Utility(object):
def __init__(self, client=None):
self.client = client
def verify_payment_signature(self, parameters):
order_id = str(parameters['razorpay_order_id'])
payment_id = str(parameters['razorpay_payment_id'])
razorpay_signature = str(parameters['razorpay_signature'])
msg = "{}|{}".format(order_id, payment_id)
secret = str(self.client.auth[1])
self.verify_signature(msg, razorpay_signature, secret)
def verify_webhook_signature(self, body, signature, secret):
self.verify_signature(body, signature, secret)
def verify_signature(self, body, signature, key):
if sys.version_info[0] == 3: # pragma: no cover
key = bytes(key, 'utf-8')
body = bytes(body, 'utf-8')
dig = hmac.new(key=key,
msg=body,
digestmod=hashlib.sha256)
generated_signature = dig.hexdigest()
if sys.version_info[0:3] < (2, 7, 7):
result = self.compare_string(generated_signature, signature)
else:
result = hmac.compare_digest(generated_signature, signature)
if not result:
raise SignatureVerificationError(
'Razorpay Signature Verification Failed')
# Taken from Django Source Code
# Used in python version < 2.7.7
# As hmac.compare_digest is not present in prev versions
def compare_string(self, expected_str, actual_str):
"""
Returns True if the two strings are equal, False otherwise
The time taken is independent of the number of characters that match
For the sake of simplicity, this function executes in constant time only
when the two strings have the same length. It short-circuits when they
have different lengths
"""
if len(expected_str) != len(actual_str):
return False
result = 0
for x, y in zip(expected_str, actual_str):
result |= ord(x) ^ ord(y)
return result == 0
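# Usage sketch (assumes a configured razorpay.Client named `client`, the raw
# webhook body from your web framework, and the webhook secret you configured
# in the Razorpay dashboard; the header name is the one Razorpay documents,
# repeated here from memory):
#
#     client.utility.verify_webhook_signature(
#         raw_request_body_text,
#         request.headers.get("X-Razorpay-Signature"),
#         webhook_secret,
#     )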
|
saleor/lib/python3.7/site-packages/razorpay/utility/utility.py
| 0.409575 | 0.176636 |
import falcon
from falcon.media.validators import jsonschema
from management_api.utils.logger import get_logger
from management_api.endpoints.endpoint_utils import create_endpoint, delete_endpoint, \
scale_endpoint, update_endpoint, view_endpoint, list_endpoints
from management_api.schemas.endpoints import endpoint_post_schema, endpoint_delete_schema, \
endpoint_patch_schema
logger = get_logger(__name__)
class Endpoints(object):
def on_get(self, req, resp, tenant_name):
namespace = tenant_name
endpoints = list_endpoints(namespace, id_token=req.get_header('Authorization'))
resp.status = falcon.HTTP_200
resp.body = endpoints
@jsonschema.validate(endpoint_post_schema)
def on_post(self, req, resp, tenant_name):
namespace = tenant_name
body = req.media
endpoint_url = create_endpoint(parameters=body, namespace=namespace,
id_token=req.get_header('Authorization'))
resp.status = falcon.HTTP_200
resp.body = 'Endpoint created\n {}'.format(endpoint_url)
@jsonschema.validate(endpoint_delete_schema)
def on_delete(self, req, resp, tenant_name):
namespace = tenant_name
body = req.media
endpoint_url = delete_endpoint(parameters=body, namespace=namespace,
id_token=req.get_header('Authorization'))
resp.status = falcon.HTTP_200
resp.body = 'Endpoint {} deleted\n'.format(endpoint_url)
class EndpointScale(object):
@jsonschema.validate(endpoint_patch_schema)
def on_patch(self, req, resp, tenant_name, endpoint_name):
namespace = tenant_name
body = req.media
endpoint_url = scale_endpoint(parameters=body, namespace=namespace,
endpoint_name=endpoint_name,
id_token=req.get_header('Authorization'))
message = 'Endpoint {} patched successfully. New values: {}\n'.format(endpoint_url, body)
resp.status = falcon.HTTP_200
resp.body = message
logger.info(message)
class Endpoint(object):
def on_get(self, req, resp, tenant_name, endpoint_name):
namespace = tenant_name
endpoint = view_endpoint(endpoint_name=endpoint_name, namespace=namespace,
id_token=req.get_header('Authorization'))
resp.status = falcon.HTTP_200
resp.body = endpoint
@jsonschema.validate(endpoint_patch_schema)
def on_patch(self, req, resp, tenant_name, endpoint_name):
namespace = tenant_name
body = req.media
endpoint_url = update_endpoint(body, namespace, endpoint_name,
id_token=req.get_header('Authorization'))
message = 'Endpoint {} patched successfully. New values: {}\n'.format(endpoint_url, body)
resp.status = falcon.HTTP_200
resp.body = message
logger.info(message)
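# --- Illustrative wiring sketch (not from the original project) ---
# One way these resources could be mounted on a falcon API; the URI templates and
# the function below are assumptions, not the project's actual routing.
def _example_routing():
    api = falcon.API()
    api.add_route('/tenants/{tenant_name}/endpoints', Endpoints())
    api.add_route('/tenants/{tenant_name}/endpoints/{endpoint_name}', Endpoint())
    api.add_route('/tenants/{tenant_name}/endpoints/{endpoint_name}/scale', EndpointScale())
    return api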
|
management/management_api/endpoints/endpoints.py
| 0.358802 | 0.048339 |
import streamlit as st
from PIL import Image
def app():
st.title("Vegetation Analysis")
st.markdown(
"""
The goal of this task is to discover the use of different vegetation indices to identify the level of
        desertification in northern Iraq. Indices of interest include NDVI, NDWI, NDBI, and MSAVI. Specifically, we conducted the analysis using the NDVI
        index for the years 2016, 2018 and 2021. A summary of what has been done for this task is shown below:
"""
)
# Summary
st.subheader("Summary")
st.markdown(
"""
1. **Dataset:** Sentinel2 images using Google Earth Engine
2. **Region of Interest:** Mosul - Iraq
3. **Periods of study:** 2016, 2018, 2021
4. **Bands:** 5 Bands downloaded: R, G, B, NIR, SWIR
5. **Processing method:** Used rasterio to process the images
"""
)
# NDVI analysis
st.subheader("1. NDVI Analysis")
    # NDVI Definition
st.info("""
The normalized difference vegetation index (NDVI) is a simple graphical indicator that can be used to analyze remote sensing measurements, often from a space platform,
assessing whether or not the target being observed contains live green vegetation
"""
)
st.markdown(
"""
The following shows NDVI values of Mosul for three different periods:
**2016**, **2018** and **2021**, calculated using data from Sentinel2.
"""
)
# NDVI_classes_2016
st.markdown("""**NDVI: 2016**""")
image1 = Image.open('NDVI_classes_2016.png')
st.image(image1, use_column_width=True)
st.markdown(""" ----- """)
# NDVI_classes_2018
st.markdown("""**NDVI: 2018**""")
image2 = Image.open('NDVI_classes_2018.png')
st.image(image2, use_column_width=True)
st.markdown(""" ----- """)
# NDVI_classes_2021
st.markdown("""**NDVI: 2021**""")
image3 = Image.open('NDVI_classes_2021.png')
st.image(image3, use_column_width=True)
# Pie chart Analysis
st.subheader("2. Pie chart Analysis")
st.markdown(
"""
The following shows pie chart analysis of Mosul over three periods: 2016, 2018 and 2021.
The results clearly show that the arid area is reducing and the green area is increasing, which seems to be a good indication.
"""
)
st.markdown("""**Pie chart analysis of Mosul: 2016**""")
image2 = Image.open('NDVI_2016.png')
st.image(image2, use_column_width=True)
st.markdown(""" ----- """)
st.markdown("""**Pie chart analysis of Mosul: 2018**""")
image3 = Image.open('NDVI_2018.png')
st.image(image3, use_column_width=True)
st.markdown(""" ----- """)
st.markdown("""**Pie chart analysis of Mosul: 2021**""")
image3 = Image.open('NDVI_2021.png')
st.image(image3, use_column_width=True)
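# --- Illustrative sketch (separate from the Streamlit app above) ---
# How the NDVI rasters behind these figures could be computed with rasterio and
# numpy; the band file names are placeholders (Sentinel-2 B04 = red, B08 = NIR).
import numpy as np
import rasterio

def compute_ndvi(red_path='B04_red.tif', nir_path='B08_nir.tif'):
    with rasterio.open(red_path) as red_src, rasterio.open(nir_path) as nir_src:
        red = red_src.read(1).astype('float32')
        nir = nir_src.read(1).astype('float32')
    # NDVI = (NIR - Red) / (NIR + Red), guarding against division by zero
    return np.where((nir + red) == 0.0, 0.0, (nir - red) / (nir + red))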
|
apps/vegetation_analysis.py
| 0.795857 | 0.589894 |
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='inference.proto',
package='',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0finference.proto\"\'\n\x0b\x44\x61taRequest\x12\x18\n\tdata_list\x18\x01 \x03(\x0b\x32\x05.Data\",\n\x04\x44\x61ta\x12\x11\n\tdata_file\x18\x01 \x01(\t\x12\x11\n\tdata_name\x18\x02 \x01(\t\"#\n\x0c\x44\x61taResponse\x12\x13\n\x0bjson_result\x18\x01 \x01(\t2>\n\x10InferenceService\x12*\n\tinference\x12\x0c.DataRequest\x1a\r.DataResponse\"\x00\x62\x06proto3'
)
_DATAREQUEST = _descriptor.Descriptor(
name='DataRequest',
full_name='DataRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_list', full_name='DataRequest.data_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19,
serialized_end=58,
)
_DATA = _descriptor.Descriptor(
name='Data',
full_name='Data',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_file', full_name='Data.data_file', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data_name', full_name='Data.data_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=60,
serialized_end=104,
)
_DATARESPONSE = _descriptor.Descriptor(
name='DataResponse',
full_name='DataResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='json_result', full_name='DataResponse.json_result', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=106,
serialized_end=141,
)
_DATAREQUEST.fields_by_name['data_list'].message_type = _DATA
DESCRIPTOR.message_types_by_name['DataRequest'] = _DATAREQUEST
DESCRIPTOR.message_types_by_name['Data'] = _DATA
DESCRIPTOR.message_types_by_name['DataResponse'] = _DATARESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DataRequest = _reflection.GeneratedProtocolMessageType('DataRequest', (_message.Message,), {
'DESCRIPTOR' : _DATAREQUEST,
'__module__' : 'inference_pb2'
# @@protoc_insertion_point(class_scope:DataRequest)
})
_sym_db.RegisterMessage(DataRequest)
Data = _reflection.GeneratedProtocolMessageType('Data', (_message.Message,), {
'DESCRIPTOR' : _DATA,
'__module__' : 'inference_pb2'
# @@protoc_insertion_point(class_scope:Data)
})
_sym_db.RegisterMessage(Data)
DataResponse = _reflection.GeneratedProtocolMessageType('DataResponse', (_message.Message,), {
'DESCRIPTOR' : _DATARESPONSE,
'__module__' : 'inference_pb2'
# @@protoc_insertion_point(class_scope:DataResponse)
})
_sym_db.RegisterMessage(DataResponse)
_INFERENCESERVICE = _descriptor.ServiceDescriptor(
name='InferenceService',
full_name='InferenceService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=143,
serialized_end=205,
methods=[
_descriptor.MethodDescriptor(
name='inference',
full_name='InferenceService.inference',
index=0,
containing_service=None,
input_type=_DATAREQUEST,
output_type=_DATARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_INFERENCESERVICE)
DESCRIPTOR.services_by_name['InferenceService'] = _INFERENCESERVICE
# @@protoc_insertion_point(module_scope)
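# --- Illustrative usage sketch (hand-written, not part of the generated output) ---
# Constructing the messages defined above; the field values are placeholders.
def _example_messages():
    request = DataRequest(
        data_list=[Data(data_file='<base64-encoded file>', data_name='sample.jpg')])
    response = DataResponse(json_result='{}')
    return request, response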
|
tianshu_serving/proto/inference_pb2.py
| 0.224565 | 0.075585 |
import re
from comply.rules.rule import *
from comply.rules.patterns import FUNC_BODY_PATTERN
from comply.util.scope import depth
class ScopeTooDeep(Rule):
""" Don't write deeply nested code.
A deeply nested scope is often an indication of too high complexity and can be
difficult to read.
"""
def __init__(self):
Rule.__init__(self, name='scope-too-deep',
description='Scope is too deep ({depth} > {max} levels)',
suggestion='Avoid nesting code too deeply. Consider refactoring.')
MAX = 3
pattern = re.compile(FUNC_BODY_PATTERN)
def collect(self, file: CheckFile):
offenders = []
text = file.stripped
max_depth = ScopeTooDeep.MAX
for scope_match in self.pattern.finditer(text):
scope_index = scope_match.start()
scope_depth = depth(scope_index, text)
if scope_depth > max_depth:
line_number, column = file.line_number_at(scope_index)
offender = self.violate(at=(line_number, column),
to=(line_number, column + 1),
lines=[(line_number, file.lines[line_number - 1])],
meta={'depth': scope_depth,
'max': max_depth})
offenders.append(offender)
return offenders
@property
def triggers(self):
return [
('void func(...) {\n'
' if (true) {\n'
' if (false) {\n'
' if (true) {\n'
' if (true) ↓{\n'
' ...\n'
' }\n'
' }\n'
' }\n'
' }\n'
'}')
]
@property
def nontriggers(self):
return [
('void func(...) {\n'
' if (true) {\n'
' if (false) {\n'
' if (true) {\n'
' ...\n'
' }\n'
' }\n'
' }\n'
'}')
]
|
comply/rules/standard/scope_too_deep.py
| 0.666714 | 0.246375 |
import pytest
import datetime
from pupa.scrape import Event
def event_obj():
e = Event(
name="get-together",
start_date=datetime.datetime.utcnow().isoformat().split('.')[0] + 'Z',
location_name="Joe's Place",
)
e.add_source(url='http://example.com/foobar')
return e
def test_basic_event():
e = event_obj()
e.validate()
def test_no_location():
e = Event(
name="get-together",
start_date=datetime.datetime.utcnow().isoformat().split('.')[0] + 'Z',
)
e.add_source(url='http://example.com/foobar')
e.validate()
def test_event_str():
e = event_obj()
assert e.name in str(e)
def test_bad_event():
e = event_obj()
e.start_date = 6
with pytest.raises(ValueError):
e.validate()
def test_basic_agenda():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
assert agenda['description'] == 'foo bar'
assert e.agenda[0] == agenda
e.validate()
def test_agenda_add_person():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
assert agenda['related_entities'] == []
agenda.add_person(person='<NAME>', note='chair')
assert len(e.agenda[0]['related_entities']) == 1
e.validate()
def test_agenda_add_vote_event():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
assert agenda['related_entities'] == []
agenda.add_vote_event(vote_event='Roll no. 12')
assert len(e.agenda[0]['related_entities']) == 1
e.validate()
def test_agenda_add_subject():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
agenda.add_subject('test')
assert e.agenda[0]['subjects'] == ['test']
agenda.add_subject('test2')
assert e.agenda[0]['subjects'] == ['test', 'test2']
e.validate()
def test_agenda_add_classification():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
agenda.add_classification('test')
assert e.agenda[0]['classification'] == ['test']
agenda.add_classification('test2')
assert e.agenda[0]['classification'] == ['test', 'test2']
e.validate()
def test_agenda_add_extra():
e = event_obj()
a = e.add_agenda_item('foo bar')
a['extras'] = dict(foo=1, bar=['baz'])
assert e.agenda[0]['extras'] == {'foo': 1, 'bar': ['baz']}
def test_add_committee():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
assert agenda['related_entities'] == []
agenda.add_committee(committee='Hello, World', note='host')
e.validate()
def test_add_bill():
e = event_obj()
agenda = e.add_agenda_item("foo bar")
assert agenda['related_entities'] == []
agenda.add_bill(bill='HB 101', note='consideration')
e.validate()
def test_add_document():
e = event_obj()
assert e.documents == []
e.add_document(note='hello', url='http://example.com', media_type="text/html")
assert len(e.documents) == 1
o = e.documents[0]
assert o['note'] == 'hello'
assert o['links'] == [{'url': 'http://example.com', 'media_type': 'text/html', 'text': ''}]
e.validate()
def test_participants():
e = event_obj()
e.add_participant('Committee of the Whole', type='committee', note='everyone')
assert len(e.participants) == 1
assert e.participants[0]['name'] == 'Committee of the Whole'
assert e.participants[0]['entity_type'] == 'committee'
assert e.participants[0]['note'] == 'everyone'
# and add_person, which is a shortcut
e.add_person('<NAME>')
assert len(e.participants) == 2
assert e.participants[1]['name'] == '<NAME>'
assert e.participants[1]['entity_type'] == 'person'
assert e.participants[1]['note'] == 'participant'
def test_set_location():
e = event_obj()
e.set_location('North Pole', note='it is cold here', url='https://www.northpole.com',
coordinates={'latitude': '90.0000', 'longitude': '0.0000'})
assert e.location.get('name') == 'North Pole'
assert e.location.get('note') == 'it is cold here'
assert e.location.get('url') == 'https://www.northpole.com'
assert e.location.get('coordinates').get('latitude') == '90.0000'
assert e.location.get('coordinates').get('longitude') == '0.0000'
e.validate()
def test_add_media():
e = event_obj()
name = "<NAME>"
a = e.add_agenda_item(description='foo')
a.add_media_link(note=name, url="http://pault.ag", media_type="text/html")
a.add_media_link(note=name, url="ftp://pault.ag", media_type="text/plain")
e.validate()
assert len(e.agenda[0]['media']) == 1
assert len(e.agenda[0]['media'][0]['links']) == 2
e.add_media_link(note=name, url="http://pault.ag", media_type="text/html")
e.add_media_link(note=name, url="ftp://pault.ag", media_type="text/plain")
e.validate()
assert len(e.media) == 1
assert len(e.media[0]['links']) == 2
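# --- Side note (not part of the test module) ---
# The start_date values above are second-precision UTC ISO-8601 strings with a
# trailing 'Z'; this helper shows the construction used in the fixtures.
def _utc_stamp():
    return datetime.datetime.utcnow().isoformat().split('.')[0] + 'Z'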
|
pupa/tests/scrape/test_event_scrape.py
| 0.574753 | 0.518363 |
import unittest
from itertools import product
from Multi_cell import *
class MultiCellTestCase(unittest.TestCase):
def test_multi_cell_INV_n_INV(self):
str_netlist_1 = "M0001 GND IN001 OUT01 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
str_netlist_2 = "M0001 GND IN001 OUT01 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
multi_cell = MultiCell()
iso, share = multi_cell.construct(str_netlist_1, str_netlist_2)
self.assertCountEqual([], share)
self.assertCountEqual([
"M0001 GND IN001 N0001 GND NMOS\n"
"M0002 GND N0001 OUT01 GND NMOS\n"
"M0003 N0001 IN001 VDD VDD PMOS\n"
"M0004 OUT01 N0001 VDD VDD PMOS\n"
], iso)
def test_multi_cell_with_one_internal(self):
str_netlist_1 = "M0001 GND IN001 OUT01 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
str_netlist_2 = "M0001 GND N0001 OUT01 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
multi_cell = MultiCell()
iso, share = multi_cell.construct(str_netlist_1, str_netlist_2)
self.assertCountEqual([], share)
self.assertCountEqual([
"M0001 GND IN001 N0002 GND NMOS\n"
"M0002 GND N0001 OUT01 GND NMOS\n"
"M0003 N0002 IN001 VDD VDD PMOS\n"
"M0004 OUT01 N0002 VDD VDD PMOS\n"
], iso)
def test_multi_cell_with_two_internal(self):
str_netlist_1 = "M0001 GND IN001 N0001 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
str_netlist_2 = "M0001 GND N0001 OUT01 GND NMOS\n" \
"M0002 OUT01 IN001 VDD VDD PMOS\n"
multi_cell = MultiCell()
iso, share = multi_cell.construct(str_netlist_1, str_netlist_2)
self.assertCountEqual([], share)
self.assertCountEqual([
"M0001 GND IN001 N0001 GND NMOS\n"
"M0002 GND N0002 OUT01 GND NMOS\n"
"M0003 N0003 IN001 VDD VDD PMOS\n"
"M0004 OUT01 N0003 VDD VDD PMOS\n"
], iso)
def test_multi_cell_with_two_inputs(self):
str_netlist_1 = "M0001 GND IN001 N0001 GND NMOS\n" \
"M0002 OUT01 IN002 VDD VDD NMOS\n"
str_netlist_2 = "M0001 IN001 IN002 IN003 GND NMOS\n"
multi_cell = MultiCell()
iso, share = multi_cell.construct(str_netlist_1, str_netlist_2)
template_1 = "M0001 GND IN001 N0001 GND NMOS\n" \
"M0002 N0002 IN002 VDD GND NMOS\n"\
"M0003 IN003 N0002 IN004 GND NMOS\n"
template_2 = "M0001 GND IN001 N0001 GND NMOS\n" \
"M0002 N0002 IN002 VDD GND NMOS\n" \
"M0003 N0002 IN003 IN004 GND NMOS\n"
template_3 = "M0001 GND IN001 N0001 GND NMOS\n" \
"M0002 N0002 IN002 VDD GND NMOS\n" \
"M0003 IN003 IN004 N0002 GND NMOS\n"
self.assertCountEqual([
template_1.replace('IN004', 'IN003'),
template_2.replace('IN004', 'IN003'),
template_3.replace('IN004', 'IN003')
], iso)
shared_golden = list()
for replacements in product(('IN001', 'IN002', 'IN003'), repeat=2):
if replacements[0] == replacements[1] and replacements[0] == 'IN003':
continue
shared_golden.append(template_1.replace('IN003', replacements[0]).replace('IN004', replacements[1]))
shared_golden.append(template_2.replace('IN003', replacements[0]).replace('IN004', replacements[1]))
shared_golden.append(template_3.replace('IN003', replacements[0]).replace('IN004', replacements[1]))
self.assertCountEqual(shared_golden, share)
def test_multi_cell_with_1_2(self):
str_netlist_1 = "M0001 OUT01 VDD IN001 GND NMOS\n"
str_netlist_2 = "M0001 VDD IN001 OUT01 GND NMOS\n"\
"M0002 OUT01 IN001 IN002 VDD PMOS\n"
multi_cell = MultiCell()
iso, share = multi_cell.construct(str_netlist_1, str_netlist_2)
self.assertCountEqual([
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD N0001 OUT01 GND NMOS\nM0003 OUT01 N0001 IN001 VDD PMOS\n",
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD IN001 OUT01 GND NMOS\nM0003 OUT01 IN001 N0001 VDD PMOS\n"
], share)
self.assertCountEqual([
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD N0001 OUT01 GND NMOS\nM0003 OUT01 N0001 IN002 VDD PMOS\n",
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD N0001 OUT01 GND NMOS\nM0003 OUT01 N0001 IN003 VDD PMOS\n",
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD IN002 OUT01 GND NMOS\nM0003 OUT01 IN002 N0001 VDD PMOS\n",
"M0001 N0001 VDD IN001 GND NMOS\nM0002 VDD IN003 OUT01 GND NMOS\nM0003 OUT01 IN003 N0001 VDD PMOS\n"
], iso)
if __name__ == '__main__':
unittest.main()
|
MultiCellTest.py
| 0.500488 | 0.364778 |
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
@pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
def test_compare_axis(align_axis):
# GH#30429
df = pd.DataFrame(
{"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan], "col3": [1.0, 2.0, 3.0]},
columns=["col1", "col2", "col3"],
)
df2 = df.copy()
df2.loc[0, "col1"] = "c"
df2.loc[2, "col3"] = 4.0
result = df.compare(df2, align_axis=align_axis)
if align_axis in (1, "columns"):
indices = pd.Index([0, 2])
columns = pd.MultiIndex.from_product([["col1", "col3"], ["self", "other"]])
expected = pd.DataFrame(
[["a", "c", np.nan, np.nan], [np.nan, np.nan, 3.0, 4.0]],
index=indices,
columns=columns,
)
else:
indices = pd.MultiIndex.from_product([[0, 2], ["self", "other"]])
columns = pd.Index(["col1", "col3"])
expected = pd.DataFrame(
[["a", np.nan], ["c", np.nan], [np.nan, 3.0], [np.nan, 4.0]],
index=indices,
columns=columns,
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"keep_shape, keep_equal",
[
(True, False),
(False, True),
(True, True),
# False, False case is already covered in test_compare_axis
],
)
def test_compare_various_formats(keep_shape, keep_equal):
df = pd.DataFrame(
{"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan], "col3": [1.0, 2.0, 3.0]},
columns=["col1", "col2", "col3"],
)
df2 = df.copy()
df2.loc[0, "col1"] = "c"
df2.loc[2, "col3"] = 4.0
result = df.compare(df2, keep_shape=keep_shape, keep_equal=keep_equal)
if keep_shape:
indices = pd.Index([0, 1, 2])
columns = pd.MultiIndex.from_product(
[["col1", "col2", "col3"], ["self", "other"]]
)
if keep_equal:
expected = pd.DataFrame(
[
["a", "c", 1.0, 1.0, 1.0, 1.0],
["b", "b", 2.0, 2.0, 2.0, 2.0],
["c", "c", np.nan, np.nan, 3.0, 4.0],
],
index=indices,
columns=columns,
)
else:
expected = pd.DataFrame(
[
["a", "c", np.nan, np.nan, np.nan, np.nan],
[np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
[np.nan, np.nan, np.nan, np.nan, 3.0, 4.0],
],
index=indices,
columns=columns,
)
else:
indices = pd.Index([0, 2])
columns = pd.MultiIndex.from_product([["col1", "col3"], ["self", "other"]])
expected = pd.DataFrame(
[["a", "c", 1.0, 1.0], ["c", "c", 3.0, 4.0]], index=indices, columns=columns
)
tm.assert_frame_equal(result, expected)
def test_compare_with_equal_nulls():
# We want to make sure two NaNs are considered the same
# and dropped where applicable
df = pd.DataFrame(
{"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan], "col3": [1.0, 2.0, 3.0]},
columns=["col1", "col2", "col3"],
)
df2 = df.copy()
df2.loc[0, "col1"] = "c"
result = df.compare(df2)
indices = pd.Index([0])
columns = pd.MultiIndex.from_product([["col1"], ["self", "other"]])
expected = pd.DataFrame([["a", "c"]], index=indices, columns=columns)
tm.assert_frame_equal(result, expected)
def test_compare_with_non_equal_nulls():
# We want to make sure the relevant NaNs do not get dropped
# even if the entire row or column are NaNs
df = pd.DataFrame(
{"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan], "col3": [1.0, 2.0, 3.0]},
columns=["col1", "col2", "col3"],
)
df2 = df.copy()
df2.loc[0, "col1"] = "c"
df2.loc[2, "col3"] = np.nan
result = df.compare(df2)
indices = pd.Index([0, 2])
columns = pd.MultiIndex.from_product([["col1", "col3"], ["self", "other"]])
expected = pd.DataFrame(
[["a", "c", np.nan, np.nan], [np.nan, np.nan, 3.0, np.nan]],
index=indices,
columns=columns,
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("align_axis", [0, 1])
def test_compare_multi_index(align_axis):
df = pd.DataFrame(
{"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan], "col3": [1.0, 2.0, 3.0]}
)
df.columns = pd.MultiIndex.from_arrays([["a", "a", "b"], ["col1", "col2", "col3"]])
df.index = pd.MultiIndex.from_arrays([["x", "x", "y"], [0, 1, 2]])
df2 = df.copy()
df2.iloc[0, 0] = "c"
df2.iloc[2, 2] = 4.0
result = df.compare(df2, align_axis=align_axis)
if align_axis == 0:
indices = pd.MultiIndex.from_arrays(
[["x", "x", "y", "y"], [0, 0, 2, 2], ["self", "other", "self", "other"]]
)
columns = pd.MultiIndex.from_arrays([["a", "b"], ["col1", "col3"]])
data = [["a", np.nan], ["c", np.nan], [np.nan, 3.0], [np.nan, 4.0]]
else:
indices = pd.MultiIndex.from_arrays([["x", "y"], [0, 2]])
columns = pd.MultiIndex.from_arrays(
[
["a", "a", "b", "b"],
["col1", "col1", "col3", "col3"],
["self", "other", "self", "other"],
]
)
data = [["a", "c", np.nan, np.nan], [np.nan, np.nan, 3.0, 4.0]]
expected = pd.DataFrame(data=data, index=indices, columns=columns)
tm.assert_frame_equal(result, expected)
def test_compare_unaligned_objects():
# test DataFrames with different indices
msg = "Can only compare identically-labeled DataFrame objects"
with pytest.raises(ValueError, match=msg):
df1 = pd.DataFrame([1, 2, 3], index=["a", "b", "c"])
df2 = pd.DataFrame([1, 2, 3], index=["a", "b", "d"])
df1.compare(df2)
# test DataFrames with different shapes
msg = "Can only compare identically-labeled DataFrame objects"
with pytest.raises(ValueError, match=msg):
df1 = pd.DataFrame(np.ones((3, 3)))
df2 = pd.DataFrame(np.zeros((2, 1)))
df1.compare(df2)
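# --- Quick illustrative sketch (separate from the tests; needs pandas >= 1.1) ---
# What DataFrame.compare returns by default for the small frame used throughout this
# module: only the differing cells, under a (column, self/other) column MultiIndex.
def _demo_compare():
    base = pd.DataFrame({"col1": ["a", "b", "c"], "col2": [1.0, 2.0, np.nan]})
    other = base.copy()
    other.loc[0, "col1"] = "c"
    return base.compare(other)  # one row (index 0) with columns ('col1', 'self'/'other')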
|
pandas/tests/frame/methods/test_compare.py
| 0.443841 | 0.648286 |
import json
import logging
from .base import WeTransferBase
from .file import File
LOG = logging.getLogger("wetransfer")
LOG.addHandler(logging.NullHandler())
LOG.setLevel(logging.INFO)
class WeTransfer(WeTransferBase):
WE_ENDPOINT_DEV = 'https://dev.wetransfer.com'
def __finalize_transfer(self, transfer_id):
"""
Finalize transfer.
:param transfer_id: transfer id.
:return: WeTransfer URL
"""
_, body = self.put('transfers/%s/finalize' % transfer_id, status=200)
return body['url']
def __complete_file_upload(self, transfer_id, file_id, part_numbers):
"""
Complete file upload.
:param transfer_id: transfer id
:param file_id: file id
:param part_numbers: part numbers
:return: None
"""
data = {'part_numbers': part_numbers}
LOG.debug(json.dumps(data, sort_keys=True, indent=2, separators=(',', ': ')))
self.put('transfers/%s/files/%s/upload-complete' % (transfer_id, file_id),
data=json.dumps(data), status=200)
def __request_upload_url(self, transfer_id, file_id, part_number):
"""
Request special upload url, which is tailored for AWS S3
:param transfer_id: transfer id
:param file_id: file id
:param part_number: part number
:return: AWS S3 upload url
"""
_, body = self.get('transfers/%s/files/%s/upload-url/%s' % (transfer_id, file_id, part_number), status=200)
return body['url']
def __create_transfer(self, message, files):
"""
Create a new transfer.
:param message: Message that goes with the transfer
:param files: An array of files
:return:
"""
files_stream = [{'name': file.name, 'size': file.size} for file in files]
data = {'message': message, 'files': files_stream}
_, body = self.post('transfers', data=json.dumps(data), status=201)
LOG.debug(json.dumps(body, sort_keys=True, indent=2, separators=(',', ': ')))
files_info = body['files']
for i in range(len(files_info)):
file_info = files_info[i]
multipart = file_info['multipart']
file = files[i]
file.id = file_info['id']
file.part_numbers = multipart['part_numbers']
file.chunk_size = multipart['chunk_size']
return body['id']
def upload_files(self, message, filepaths):
"""
Main entrypoint for this class. Pass in a message and a list of filepaths to upload.
:param message: Message to go with uploads
:param filepaths: A list of filepaths of files to upload
:return: The download URL generated by WeTransfer
"""
files = [File(filepath) for filepath in filepaths]
transfer_id = self.__create_transfer(message, files)
for file in files:
part_number = 1
with open(file.path, 'rb') as fh:
while True:
bytes_read = fh.read(file.chunk_size)
                    if not bytes_read:  # b'' signals end of file
break
url = self.__request_upload_url(transfer_id, file.id, part_number)
self.s3_file_upload(url, bytes_read)
part_number += 1
self.__complete_file_upload(transfer_id, file.id, file.part_numbers)
return self.__finalize_transfer(transfer_id)
def upload_file(self, message, file_path):
"""
Upload a single file.
:param message: Message
:param file_path: Path of file to upload
:return: The download URL generated by WeTransfer
"""
return self.upload_files(message, [file_path])
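# --- Minimal usage sketch (not part of the original module) ---
# How a WeTransfer instance is constructed depends on WeTransferBase (in .base, not
# shown here), so `wt` is taken as given; only the call pattern is illustrated.
def share_file(wt):
    """Upload a single file and return the public download URL."""
    return wt.upload_file('quarterly report', 'report.pdf')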
|
wetransfer/transfer.py
| 0.40028 | 0.14013 |
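A minimal usage sketch of the WeTransfer client defined above. Only upload_file() and upload_files() come from the code; how the client is constructed (API key handling lives in WeTransferBase, which is not shown here) is an assumption.

# Hypothetical usage; constructor arguments are assumed to be handled by WeTransferBase.
from wetransfer.transfer import WeTransfer

if __name__ == '__main__':
    wt = WeTransfer()  # assumption: credentials/API key are resolved by the base class
    # Single file: delegates to upload_files() under the hood
    print(wt.upload_file('Quarterly report', '/tmp/report.pdf'))
    # Several files in one transfer
    print(wt.upload_files('Holiday photos', ['/tmp/a.jpg', '/tmp/b.jpg']))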
from datetime import datetime
from dateutil.relativedelta import relativedelta
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models, transaction
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from chamber.models import SmartModel
from typing import TYPE_CHECKING, Iterable, Optional, Type
from enumfields import NumEnumField
from .enums import LegalReasonState
from .loading import purpose_register
if TYPE_CHECKING:
from gdpr.purposes.default import AbstractPurpose
class LegalReasonManager(models.Manager):
def create_consent(self, purpose_slug: str, source_object, issued_at: Optional[datetime] = None,
tag: Optional[str] = None, related_objects: Optional[Iterable[Type[models.Model]]] = None):
"""
        Create (or update, if it already exists) a LegalReason with the given purpose slug for a concrete object instance
Args:
purpose_slug: String of Legal Reason purpose
source_object: Source object this Legal Reason is related to
issued_at: When the Legal Reason consent was given
            tag: String the developer can add to the created consent to mark their business processes
            related_objects: Objects this Legal Reason relates to (e.g. orders, registrations, etc.)
Returns:
Legal Reason: LegalReason object
"""
try:
purpose = purpose_register[purpose_slug]
except KeyError:
            raise KeyError('Purpose with slug {} does not exist'.format(purpose_slug))
issued_at = issued_at or timezone.now()
legal_reason, created = LegalReason.objects.get_or_create(
source_object_content_type=ContentType.objects.get_for_model(source_object.__class__),
source_object_id=str(source_object.pk),
purpose_slug=purpose_slug,
defaults={
'issued_at': issued_at,
'expires_at': issued_at + purpose.expiration_timedelta,
'tag': tag,
'state': LegalReasonState.ACTIVE,
}
)
if not created:
legal_reason.change_and_save(
expires_at=timezone.now() + purpose.expiration_timedelta,
tag=tag,
state=LegalReasonState.ACTIVE
)
for related_object in related_objects or ():
legal_reason.related_objects.update_or_create(
object_content_type=ContentType.objects.get_for_model(related_object.__class__),
object_id=related_object.pk
)
return legal_reason
def deactivate_consent(self, purpose_slug: str, source_object):
"""
Deactivate/Remove consent (Legal reason) for source_object, purpose_slug combination
Args:
purpose_slug: Purpose slug to deactivate consent for
source_object: Source object to deactivate consent for
Returns:
List of LegalReason objects
"""
reasons = []
for reason in LegalReason.objects.filter_source_instance_active_non_expired_purpose(source_object,
purpose_slug):
reason.deactivate()
reasons.append(reason)
return reasons
def exists_valid_consent(self, purpose_slug: str, source_object):
"""
Returns True if source_object has valid (ie. active and non-expired) consent (Legal Reason)
Args:
purpose_slug: Purpose_slug to check consent for
source_object: Source object to check consent for
"""
return LegalReason.objects.filter_source_instance_active_non_expired_purpose(
source_object, purpose_slug).exists()
def exists_deactivated_consent(self, purpose_slug: str, source_object):
"""
Returns True if source_object has deactivated consent (Legal Reason)
Args:
purpose_slug: Purpose_slug to check consent for
source_object: Source object to check consent for
"""
return self.filter_source_instance(source_object).filter(
state=LegalReasonState.DEACTIVATED,
purpose_slug=purpose_slug
).exists()
def expire_old_consents(self):
"""
        Anonymize and expire consents which have passed their `expires_at`.
"""
for reason in LegalReason.objects.filter_active_and_expired():
reason.expire()
class LegalReasonQuerySet(models.QuerySet):
def filter_expired_retaining_data_in_last_days(self, days=None):
"""
        Filters all Legal Reasons that retain data and that expired within the last `days` days
Args:
days: Number of days in the past. If not provided, all Legal Reasons retaining data which expired in the
past will be returned.
"""
purpose_slugs_retaining_data = [slug for slug, cls in purpose_register.items() if cls.fields]
filter_keys = {
'expires_at__lt': timezone.now(),
} if days is None else {
'expires_at__gt': timezone.now() - relativedelta(days=days),
'expires_at__lt': timezone.now()
}
return self.filter(state=LegalReasonState.ACTIVE, purpose_slug__in=purpose_slugs_retaining_data, **filter_keys)
def filter_non_expired(self):
return self.filter(Q(expires_at__gte=timezone.now()) | Q(expires_at=None))
def filter_expired(self):
return self.filter(expires_at__lte=timezone.now())
def filter_active(self):
return self.filter(state=LegalReasonState.ACTIVE)
def filter_active_and_non_expired(self):
return self.filter_active().filter_non_expired()
def filter_active_and_expired(self):
return self.filter_active().filter_expired()
def filter_source_instance(self, source_object):
return self.filter(
source_object_content_type=ContentType.objects.get_for_model(source_object.__class__),
source_object_id=str(source_object.pk)
)
def filter_source_instance_active_non_expired(self, source_object):
return self.filter_source_instance(source_object).filter_active_and_non_expired()
def filter_source_instance_active_non_expired_purpose(self, source_object, purpose_slug: str):
return self.filter_source_instance_active_non_expired(source_object).filter(
purpose_slug=purpose_slug
)
class LegalReason(SmartModel):
objects = LegalReasonManager.from_queryset(LegalReasonQuerySet)()
issued_at = models.DateTimeField(
verbose_name=_('issued at'),
null=False,
blank=False,
)
expires_at = models.DateTimeField(
verbose_name=_('expires at'),
null=True,
blank=True,
db_index=True
)
tag = models.CharField(
verbose_name=_('tag'),
null=True,
blank=True,
max_length=100
)
state = NumEnumField(
verbose_name=_('state'),
null=False,
blank=False,
enum=LegalReasonState,
default=LegalReasonState.ACTIVE
)
purpose_slug = models.CharField(
verbose_name=_('purpose'),
null=False,
blank=False,
max_length=100,
db_index=True
)
source_object_content_type = models.ForeignKey(
ContentType,
verbose_name=_('source object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
source_object_id = models.TextField(
verbose_name=_('source object ID'),
null=False, blank=False,
db_index=True
)
source_object = GenericForeignKey(
'source_object_content_type', 'source_object_id'
)
class Meta:
verbose_name = _('legal reason')
verbose_name_plural = _('legal reasons')
ordering = ('-created_at',)
unique_together = ('purpose_slug', 'source_object_content_type', 'source_object_id')
def __str__(self):
return f'{self.purpose.name}'
@property
def is_active(self):
return self.state == LegalReasonState.ACTIVE
@property
def purpose(self) -> Type["AbstractPurpose"]:
return purpose_register.get(self.purpose_slug, None)
def _anonymize_obj(self, *args, **kwargs):
purpose_register[self.purpose_slug]().anonymize_obj(self.source_object, self, *args, **kwargs)
def _deanonymize_obj(self, *args, **kwargs):
purpose_register[self.purpose_slug]().deanonymize_obj(self.source_object, *args, **kwargs)
def expire(self):
"""Anonymize obj and set state as expired."""
with transaction.atomic():
self._anonymize_obj()
self.change_and_save(state=LegalReasonState.EXPIRED)
def deactivate(self):
"""Deactivate obj and run anonymization."""
with transaction.atomic():
self._anonymize_obj()
self.change_and_save(state=LegalReasonState.DEACTIVATED)
def renew(self):
with transaction.atomic():
self.change_and_save(
expires_at=timezone.now() + purpose_register[self.purpose_slug]().expiration_timedelta,
state=LegalReasonState.ACTIVE
)
self._deanonymize_obj()
class LegalReasonRelatedObject(SmartModel):
legal_reason = models.ForeignKey(
LegalReason,
verbose_name=_('legal reason'),
null=False,
blank=False,
related_name='related_objects',
on_delete=models.CASCADE
)
object_content_type = models.ForeignKey(
ContentType,
verbose_name=_('related object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
object_id = models.TextField(
verbose_name=_('related object ID'),
null=False,
blank=False,
db_index=True
)
object = GenericForeignKey(
'object_content_type', 'object_id'
)
class Meta:
verbose_name = _('legal reason related object')
verbose_name_plural = _('legal reasons related objects')
ordering = ('-created_at',)
unique_together = ('legal_reason', 'object_content_type', 'object_id')
def __str__(self):
return '{legal_reason} {object}'.format(legal_reason=self.legal_reason, object=self.object)
class AnonymizedDataQuerySet(models.QuerySet):
def filter_source_instance_active(self, source_object):
return self.filter(
content_type=ContentType.objects.get_for_model(source_object.__class__),
object_id=str(source_object.pk),
is_active=True
)
class AnonymizedData(SmartModel):
objects = models.Manager.from_queryset(AnonymizedDataQuerySet)()
field = models.CharField(
verbose_name=_('anonymized field name'),
max_length=250,
null=False,
blank=False
)
content_type = models.ForeignKey(
ContentType,
verbose_name=_('related object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
object_id = models.TextField(
verbose_name=_('related object ID'),
null=False,
blank=False
)
object = GenericForeignKey(
'content_type', 'object_id'
)
is_active = models.BooleanField(
verbose_name=_('is active'),
default=True
)
expired_reason = models.ForeignKey(
LegalReason,
verbose_name=_('expired reason'),
null=True,
blank=True,
on_delete=models.SET_NULL
)
class Meta:
verbose_name = _('anonymized data')
verbose_name_plural = _('anonymized data')
ordering = ('-created_at',)
unique_together = ('content_type', 'object_id', 'field')
def __str__(self):
return '{field} {object}'.format(field=self.field, object=self.object)
|
gdpr/models.py
|
from datetime import datetime
from dateutil.relativedelta import relativedelta
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models, transaction
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from chamber.models import SmartModel
from typing import TYPE_CHECKING, Iterable, Optional, Type
from enumfields import NumEnumField
from .enums import LegalReasonState
from .loading import purpose_register
if TYPE_CHECKING:
from gdpr.purposes.default import AbstractPurpose
class LegalReasonManager(models.Manager):
def create_consent(self, purpose_slug: str, source_object, issued_at: Optional[datetime] = None,
tag: Optional[str] = None, related_objects: Optional[Iterable[Type[models.Model]]] = None):
"""
        Create (or update, if it already exists) a LegalReason with the given purpose slug for a concrete object instance
Args:
purpose_slug: String of Legal Reason purpose
source_object: Source object this Legal Reason is related to
issued_at: When the Legal Reason consent was given
            tag: String the developer can add to the created consent to mark their business processes
            related_objects: Objects this Legal Reason relates to (e.g. orders, registrations, etc.)
Returns:
Legal Reason: LegalReason object
"""
try:
purpose = purpose_register[purpose_slug]
except KeyError:
            raise KeyError('Purpose with slug {} does not exist'.format(purpose_slug))
issued_at = issued_at or timezone.now()
legal_reason, created = LegalReason.objects.get_or_create(
source_object_content_type=ContentType.objects.get_for_model(source_object.__class__),
source_object_id=str(source_object.pk),
purpose_slug=purpose_slug,
defaults={
'issued_at': issued_at,
'expires_at': issued_at + purpose.expiration_timedelta,
'tag': tag,
'state': LegalReasonState.ACTIVE,
}
)
if not created:
legal_reason.change_and_save(
expires_at=timezone.now() + purpose.expiration_timedelta,
tag=tag,
state=LegalReasonState.ACTIVE
)
for related_object in related_objects or ():
legal_reason.related_objects.update_or_create(
object_content_type=ContentType.objects.get_for_model(related_object.__class__),
object_id=related_object.pk
)
return legal_reason
def deactivate_consent(self, purpose_slug: str, source_object):
"""
Deactivate/Remove consent (Legal reason) for source_object, purpose_slug combination
Args:
purpose_slug: Purpose slug to deactivate consent for
source_object: Source object to deactivate consent for
Returns:
List of LegalReason objects
"""
reasons = []
for reason in LegalReason.objects.filter_source_instance_active_non_expired_purpose(source_object,
purpose_slug):
reason.deactivate()
reasons.append(reason)
return reasons
def exists_valid_consent(self, purpose_slug: str, source_object):
"""
Returns True if source_object has valid (ie. active and non-expired) consent (Legal Reason)
Args:
purpose_slug: Purpose_slug to check consent for
source_object: Source object to check consent for
"""
return LegalReason.objects.filter_source_instance_active_non_expired_purpose(
source_object, purpose_slug).exists()
def exists_deactivated_consent(self, purpose_slug: str, source_object):
"""
Returns True if source_object has deactivated consent (Legal Reason)
Args:
purpose_slug: Purpose_slug to check consent for
source_object: Source object to check consent for
"""
return self.filter_source_instance(source_object).filter(
state=LegalReasonState.DEACTIVATED,
purpose_slug=purpose_slug
).exists()
def expire_old_consents(self):
"""
        Anonymize and expire consents which have passed their `expires_at`.
"""
for reason in LegalReason.objects.filter_active_and_expired():
reason.expire()
class LegalReasonQuerySet(models.QuerySet):
def filter_expired_retaining_data_in_last_days(self, days=None):
"""
        Filters all Legal Reasons that retain data and that expired within the last `days` days
Args:
days: Number of days in the past. If not provided, all Legal Reasons retaining data which expired in the
past will be returned.
"""
purpose_slugs_retaining_data = [slug for slug, cls in purpose_register.items() if cls.fields]
filter_keys = {
'expires_at__lt': timezone.now(),
} if days is None else {
'expires_at__gt': timezone.now() - relativedelta(days=days),
'expires_at__lt': timezone.now()
}
return self.filter(state=LegalReasonState.ACTIVE, purpose_slug__in=purpose_slugs_retaining_data, **filter_keys)
def filter_non_expired(self):
return self.filter(Q(expires_at__gte=timezone.now()) | Q(expires_at=None))
def filter_expired(self):
return self.filter(expires_at__lte=timezone.now())
def filter_active(self):
return self.filter(state=LegalReasonState.ACTIVE)
def filter_active_and_non_expired(self):
return self.filter_active().filter_non_expired()
def filter_active_and_expired(self):
return self.filter_active().filter_expired()
def filter_source_instance(self, source_object):
return self.filter(
source_object_content_type=ContentType.objects.get_for_model(source_object.__class__),
source_object_id=str(source_object.pk)
)
def filter_source_instance_active_non_expired(self, source_object):
return self.filter_source_instance(source_object).filter_active_and_non_expired()
def filter_source_instance_active_non_expired_purpose(self, source_object, purpose_slug: str):
return self.filter_source_instance_active_non_expired(source_object).filter(
purpose_slug=purpose_slug
)
class LegalReason(SmartModel):
objects = LegalReasonManager.from_queryset(LegalReasonQuerySet)()
issued_at = models.DateTimeField(
verbose_name=_('issued at'),
null=False,
blank=False,
)
expires_at = models.DateTimeField(
verbose_name=_('expires at'),
null=True,
blank=True,
db_index=True
)
tag = models.CharField(
verbose_name=_('tag'),
null=True,
blank=True,
max_length=100
)
state = NumEnumField(
verbose_name=_('state'),
null=False,
blank=False,
enum=LegalReasonState,
default=LegalReasonState.ACTIVE
)
purpose_slug = models.CharField(
verbose_name=_('purpose'),
null=False,
blank=False,
max_length=100,
db_index=True
)
source_object_content_type = models.ForeignKey(
ContentType,
verbose_name=_('source object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
source_object_id = models.TextField(
verbose_name=_('source object ID'),
null=False, blank=False,
db_index=True
)
source_object = GenericForeignKey(
'source_object_content_type', 'source_object_id'
)
class Meta:
verbose_name = _('legal reason')
verbose_name_plural = _('legal reasons')
ordering = ('-created_at',)
unique_together = ('purpose_slug', 'source_object_content_type', 'source_object_id')
def __str__(self):
return f'{self.purpose.name}'
@property
def is_active(self):
return self.state == LegalReasonState.ACTIVE
@property
def purpose(self) -> Type["AbstractPurpose"]:
return purpose_register.get(self.purpose_slug, None)
def _anonymize_obj(self, *args, **kwargs):
purpose_register[self.purpose_slug]().anonymize_obj(self.source_object, self, *args, **kwargs)
def _deanonymize_obj(self, *args, **kwargs):
purpose_register[self.purpose_slug]().deanonymize_obj(self.source_object, *args, **kwargs)
def expire(self):
"""Anonymize obj and set state as expired."""
with transaction.atomic():
self._anonymize_obj()
self.change_and_save(state=LegalReasonState.EXPIRED)
def deactivate(self):
"""Deactivate obj and run anonymization."""
with transaction.atomic():
self._anonymize_obj()
self.change_and_save(state=LegalReasonState.DEACTIVATED)
def renew(self):
with transaction.atomic():
self.change_and_save(
expires_at=timezone.now() + purpose_register[self.purpose_slug]().expiration_timedelta,
state=LegalReasonState.ACTIVE
)
self._deanonymize_obj()
class LegalReasonRelatedObject(SmartModel):
legal_reason = models.ForeignKey(
LegalReason,
verbose_name=_('legal reason'),
null=False,
blank=False,
related_name='related_objects',
on_delete=models.CASCADE
)
object_content_type = models.ForeignKey(
ContentType,
verbose_name=_('related object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
object_id = models.TextField(
verbose_name=_('related object ID'),
null=False,
blank=False,
db_index=True
)
object = GenericForeignKey(
'object_content_type', 'object_id'
)
class Meta:
verbose_name = _('legal reason related object')
verbose_name_plural = _('legal reasons related objects')
ordering = ('-created_at',)
unique_together = ('legal_reason', 'object_content_type', 'object_id')
def __str__(self):
return '{legal_reason} {object}'.format(legal_reason=self.legal_reason, object=self.object)
class AnonymizedDataQuerySet(models.QuerySet):
def filter_source_instance_active(self, source_object):
return self.filter(
content_type=ContentType.objects.get_for_model(source_object.__class__),
object_id=str(source_object.pk),
is_active=True
)
class AnonymizedData(SmartModel):
objects = models.Manager.from_queryset(AnonymizedDataQuerySet)()
field = models.CharField(
verbose_name=_('anonymized field name'),
max_length=250,
null=False,
blank=False
)
content_type = models.ForeignKey(
ContentType,
verbose_name=_('related object content type'),
null=False,
blank=False,
on_delete=models.DO_NOTHING
)
object_id = models.TextField(
verbose_name=_('related object ID'),
null=False,
blank=False
)
object = GenericForeignKey(
'content_type', 'object_id'
)
is_active = models.BooleanField(
verbose_name=_('is active'),
default=True
)
expired_reason = models.ForeignKey(
LegalReason,
verbose_name=_('expired reason'),
null=True,
blank=True,
on_delete=models.SET_NULL
)
class Meta:
verbose_name = _('anonymized data')
verbose_name_plural = _('anonymized data')
ordering = ('-created_at',)
unique_together = ('content_type', 'object_id', 'field')
def __str__(self):
return '{field} {object}'.format(field=self.field, object=self.object)
| 0.804905 | 0.131982 |
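A hedged usage sketch of the consent API above. The 'marketing' purpose slug and the user instance are placeholders; the manager methods (create_consent, exists_valid_consent, deactivate_consent, expire_old_consents) are taken from the code.

from gdpr.models import LegalReason

def give_marketing_consent(user):
    # Creates the LegalReason, or reactivates and extends it if it already exists
    return LegalReason.objects.create_consent('marketing', user, tag='signup-form')

def can_send_newsletter(user):
    # True only for an active, non-expired consent
    return LegalReason.objects.exists_valid_consent('marketing', user)

def withdraw_marketing_consent(user):
    # Deactivates matching consents and anonymizes the related data
    return LegalReason.objects.deactivate_consent('marketing', user)

def nightly_cleanup():
    # Typically wired to a periodic task: expires consents past their expires_at
    LegalReason.objects.expire_old_consents()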
import os
import glob
from PIL import Image
from resizeimage import resizeimage
import sys
from xml.etree.ElementTree import ElementTree
from xml.etree.ElementTree import Element
import xml.etree.ElementTree as etree
import xml.etree.cElementTree as ET
from yattag import Doc, indent
import shutil
import pandas as pd
from google_images_download import google_images_download
from io import BytesIO
import numpy as np
import tensorflow as tf
import datetime
def size_and_name(root_dir,query,pypath):
i = 1
z = 1
main_dir = root_dir+'/'+'downloads'+'/'+query
for filename in glob.iglob(main_dir + '**/*.jpg', recursive=True):
print(filename)
im = Image.open(filename)
im = im.convert('RGB')
im.save(filename , 'JPEG', quality=90)
for filename in glob.iglob(main_dir + '**/*.png', recursive=True):
print(filename)
im = Image.open(filename)
im = im.convert('RGB')
im.save(filename , 'JPEG', quality=90)
for filename in os.listdir(main_dir):
tst =query + str(i) +'.jpg'
src =main_dir+'/'+filename
tst =main_dir+'/'+tst
os.rename(src, tst)
i = i+1
for filename in glob.iglob(main_dir + '**/*.jpg', recursive=True):
class DeepLabModel(object):
INPUT_TENSOR_NAME = 'ImageTensor:0'
OUTPUT_TENSOR_NAME = 'SemanticPredictions:0'
INPUT_SIZE = 513
FROZEN_GRAPH_NAME = 'frozen_inference_graph'
def __init__(self, tarball_path):
self.graph = tf.Graph()
graph_def = None
graph_def = tf.GraphDef.FromString(open(pypath+"/PSCMR_Tensorflow_object_trainer/"+tarball_path + "/frozen_inference_graph.pb", "rb").read())
if graph_def is None:
raise RuntimeError('Cannot find inference graph in tar archive.')
with self.graph.as_default():
tf.import_graph_def(graph_def, name='')
self.sess = tf.Session(graph=self.graph)
def run(self, image):
start = datetime.datetime.now()
width, height = image.size
resize_ratio = 1.0 * self.INPUT_SIZE / max(width, height)
target_size = (int(resize_ratio * width), int(resize_ratio * height))
resized_image = image.convert('RGB').resize(target_size, Image.ANTIALIAS)
batch_seg_map = self.sess.run(
self.OUTPUT_TENSOR_NAME,
feed_dict={self.INPUT_TENSOR_NAME: [np.asarray(resized_image)]})
seg_map = batch_seg_map[0]
end = datetime.datetime.now()
diff = end - start
print("Time taken to evaluate segmentation is : " + str(diff))
return resized_image, seg_map
def drawSegment(baseImg, matImg):
width, height = baseImg.size
dummyImg = np.zeros([height, width, 4], dtype=np.uint8)
for x in range(width):
for y in range(height):
color = matImg[y,x]
(r,g,b) = baseImg.getpixel((x,y))
if color == 0:
dummyImg[y,x,3] = 0
else :
dummyImg[y,x] = [r,g,b,255]
img = Image.fromarray(dummyImg)
print(filename)
img = img.convert('RGB')
imResize = img.resize((600,600), Image.ANTIALIAS)
imResize.save(filename , 'JPEG', quality=90)
#img.save(outputFilePath)
print(filename)
inputFilePath = filename
outputFilePath = root_dir+"/"+query+str(i)+'.jpg'
i = i + 1
if inputFilePath is None or outputFilePath is None:
print("Bad parameters. Please specify input file path and output file path")
exit()
modelType = "mobile_net_model"
if len(sys.argv) > 3 and sys.argv[3] == "1":
modelType = "xception_model"
MODEL = DeepLabModel(modelType)
print('model loaded successfully : ' + modelType)
def run_visualization(filepath):
try:
print("Trying to open : " )
jpeg_str = open(filepath, "rb").read()
orignal_im = Image.open(BytesIO(jpeg_str))
except IOError:
print('Cannot retrieve image. Please check file: ' + filepath)
return
print('running deeplab on image %s...' % filepath)
resized_im, seg_map = MODEL.run(orignal_im)
drawSegment(resized_im, seg_map)
run_visualization(inputFilePath)
|
size_name_background_removal.py
|
import os
import glob
from PIL import Image
from resizeimage import resizeimage
import sys
from xml.etree.ElementTree import ElementTree
from xml.etree.ElementTree import Element
import xml.etree.ElementTree as etree
import xml.etree.cElementTree as ET
from yattag import Doc, indent
import shutil
import pandas as pd
from google_images_download import google_images_download
from io import BytesIO
import numpy as np
import tensorflow as tf
import datetime
def size_and_name(root_dir,query,pypath):
i = 1
z = 1
main_dir = root_dir+'/'+'downloads'+'/'+query
for filename in glob.iglob(main_dir + '**/*.jpg', recursive=True):
print(filename)
im = Image.open(filename)
im = im.convert('RGB')
im.save(filename , 'JPEG', quality=90)
for filename in glob.iglob(main_dir + '**/*.png', recursive=True):
print(filename)
im = Image.open(filename)
im = im.convert('RGB')
im.save(filename , 'JPEG', quality=90)
for filename in os.listdir(main_dir):
tst =query + str(i) +'.jpg'
src =main_dir+'/'+filename
tst =main_dir+'/'+tst
os.rename(src, tst)
i = i+1
for filename in glob.iglob(main_dir + '**/*.jpg', recursive=True):
class DeepLabModel(object):
INPUT_TENSOR_NAME = 'ImageTensor:0'
OUTPUT_TENSOR_NAME = 'SemanticPredictions:0'
INPUT_SIZE = 513
FROZEN_GRAPH_NAME = 'frozen_inference_graph'
def __init__(self, tarball_path):
self.graph = tf.Graph()
graph_def = None
graph_def = tf.GraphDef.FromString(open(pypath+"/PSCMR_Tensorflow_object_trainer/"+tarball_path + "/frozen_inference_graph.pb", "rb").read())
if graph_def is None:
raise RuntimeError('Cannot find inference graph in tar archive.')
with self.graph.as_default():
tf.import_graph_def(graph_def, name='')
self.sess = tf.Session(graph=self.graph)
def run(self, image):
start = datetime.datetime.now()
width, height = image.size
resize_ratio = 1.0 * self.INPUT_SIZE / max(width, height)
target_size = (int(resize_ratio * width), int(resize_ratio * height))
resized_image = image.convert('RGB').resize(target_size, Image.ANTIALIAS)
batch_seg_map = self.sess.run(
self.OUTPUT_TENSOR_NAME,
feed_dict={self.INPUT_TENSOR_NAME: [np.asarray(resized_image)]})
seg_map = batch_seg_map[0]
end = datetime.datetime.now()
diff = end - start
print("Time taken to evaluate segmentation is : " + str(diff))
return resized_image, seg_map
def drawSegment(baseImg, matImg):
width, height = baseImg.size
dummyImg = np.zeros([height, width, 4], dtype=np.uint8)
for x in range(width):
for y in range(height):
color = matImg[y,x]
(r,g,b) = baseImg.getpixel((x,y))
if color == 0:
dummyImg[y,x,3] = 0
else :
dummyImg[y,x] = [r,g,b,255]
img = Image.fromarray(dummyImg)
print(filename)
img = img.convert('RGB')
imResize = img.resize((600,600), Image.ANTIALIAS)
imResize.save(filename , 'JPEG', quality=90)
#img.save(outputFilePath)
print(filename)
inputFilePath = filename
outputFilePath = root_dir+"/"+query+str(i)+'.jpg'
i = i + 1
if inputFilePath is None or outputFilePath is None:
print("Bad parameters. Please specify input file path and output file path")
exit()
modelType = "mobile_net_model"
if len(sys.argv) > 3 and sys.argv[3] == "1":
modelType = "xception_model"
MODEL = DeepLabModel(modelType)
print('model loaded successfully : ' + modelType)
def run_visualization(filepath):
try:
print("Trying to open : " )
jpeg_str = open(filepath, "rb").read()
orignal_im = Image.open(BytesIO(jpeg_str))
except IOError:
print('Cannot retrieve image. Please check file: ' + filepath)
return
print('running deeplab on image %s...' % filepath)
resized_im, seg_map = MODEL.run(orignal_im)
drawSegment(resized_im, seg_map)
run_visualization(inputFilePath)
| 0.193414 | 0.098296 |
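A hedged sketch of how the entry point above is presumably called. The root_dir/downloads/<query>/ layout follows from how main_dir is built inside size_and_name; the concrete paths and the query term are placeholders.

import os
from size_name_background_removal import size_and_name

root_dir = os.path.expanduser('~/dataset')   # must contain downloads/<query>/ with the raw images
query = 'coffee mug'                         # the same term used when downloading the images
pypath = os.path.expanduser('~')             # assumed parent of PSCMR_Tensorflow_object_trainer/

size_and_name(root_dir, query, pypath)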
from utils.utils import block_diag, stack_matrices, sum_sparse
from torch.nn.modules.module import Module
from torch.nn.parameter import Parameter
from torch import nn
import math
import torch
class RelationalGraphConvolution(Module):
""" Relational Graph Convolution (RGC) Layer (as described in https://arxiv.org/abs/1703.06103)"""
def __init__(self,
triples=None,
num_nodes=None,
num_relations=None,
in_features=None,
out_features=None,
edge_dropout=None,
edge_dropout_self_loop=None,
bias=True,
decomposition=None,
vertical_stacking=False,
reset_mode='xavier'):
super(RelationalGraphConvolution, self).__init__()
        assert (num_nodes is not None and num_relations is not None and out_features is not None), \
            "The following must be specified: number of nodes, number of relations and output dimension!"
# If featureless, use number of nodes instead as input dimension
in_dim = in_features if in_features is not None else num_nodes
out_dim = out_features
# Unpack arguments
weight_decomp = decomposition['type'] if decomposition is not None and 'type' in decomposition else None
num_bases = decomposition['num_bases'] if decomposition is not None and 'num_bases' in decomposition else None
num_blocks = decomposition['num_blocks'] if decomposition is not None and 'num_blocks' in decomposition else None
self.triples = triples
self.num_nodes = num_nodes
self.num_relations = num_relations
self.in_features = in_features
self.out_features = out_features
self.weight_decomp = weight_decomp
self.num_bases = num_bases
self.num_blocks = num_blocks
self.vertical_stacking = vertical_stacking
self.edge_dropout = edge_dropout
self.edge_dropout_self_loop = edge_dropout_self_loop
# Instantiate weights
if self.weight_decomp is None:
self.weights = Parameter(torch.FloatTensor(num_relations, in_dim, out_dim))
elif self.weight_decomp == 'basis':
# Weight Regularisation through Basis Decomposition
assert num_bases > 0, \
'Number of bases should be set to higher than zero for basis decomposition!'
self.bases = Parameter(torch.FloatTensor(num_bases, in_dim, out_dim))
self.comps = Parameter(torch.FloatTensor(num_relations, num_bases))
elif self.weight_decomp == 'block':
# Weight Regularisation through Block Diagonal Decomposition
assert self.num_blocks > 0, \
'Number of blocks should be set to a value higher than zero for block diagonal decomposition!'
assert in_dim % self.num_blocks == 0 and out_dim % self.num_blocks == 0,\
f'For block diagonal decomposition, input dimensions ({in_dim}, {out_dim}) must be divisible ' \
f'by number of blocks ({self.num_blocks})'
self.blocks = nn.Parameter(
torch.FloatTensor(num_relations, self.num_blocks, in_dim // self.num_blocks, out_dim // self.num_blocks))
else:
raise NotImplementedError(f'{self.weight_decomp} decomposition has not been implemented')
# Instantiate biases
if bias:
self.bias = Parameter(torch.FloatTensor(out_features))
else:
self.register_parameter('bias', None)
self.reset_parameters(reset_mode)
def reset_parameters(self, reset_mode='xavier'):
""" Initialise biases and weights (xavier or uniform) """
if reset_mode == 'xavier':
if self.weight_decomp == 'block':
nn.init.xavier_uniform_(self.blocks, gain=nn.init.calculate_gain('relu'))
elif self.weight_decomp == 'basis':
nn.init.xavier_uniform_(self.bases, gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self.comps, gain=nn.init.calculate_gain('relu'))
else:
nn.init.xavier_uniform_(self.weights, gain=nn.init.calculate_gain('relu'))
if self.bias is not None:
torch.nn.init.zeros_(self.bias)
elif reset_mode == 'uniform':
stdv = 1.0 / math.sqrt(self.weights.size(1))
if self.weight_decomp == 'block':
self.blocks.data.uniform_(-stdv, stdv)
elif self.weight_decomp == 'basis':
self.bases.data.uniform_(-stdv, stdv)
self.comps.data.uniform_(-stdv, stdv)
else:
self.weights.data.uniform_(-stdv, stdv)
if self.bias is not None:
self.bias.data.uniform_(-stdv, stdv)
else:
raise NotImplementedError(f'{reset_mode} parameter initialisation method has not been implemented')
def forward(self, triples, features=None):
""" Perform a single pass of message propagation """
assert (features is None) == (self.in_features is None), \
"Layer has not been properly configured to take in features!"
in_dim = self.in_features if self.in_features is not None else self.num_nodes
# triples = self.triples
out_dim = self.out_features
edge_dropout = self.edge_dropout
weight_decomp = self.weight_decomp
num_nodes = self.num_nodes
num_relations = self.num_relations
vertical_stacking = self.vertical_stacking
# Apply edge dropout
if edge_dropout is not None and self.training:
assert 'general' in edge_dropout and 'self_loop' in edge_dropout, \
'General and self-loop edge dropouts must be specified!'
assert type(edge_dropout['general']) is float and 0.0 <= edge_dropout['general'] <= 1.0, \
"Edge dropout rates must between 0.0 and 1.0!"
general_edo = edge_dropout['general']
self_loop_edo = edge_dropout['self_loop']
triples = drop_edges(triples, num_nodes, general_edo, self_loop_edo)
# Choose weights
if weight_decomp is None:
weights = self.weights
elif weight_decomp == 'basis':
weights = torch.einsum('rb, bio -> rio', self.comps, self.bases)
elif weight_decomp == 'block':
weights = block_diag(self.blocks)
else:
raise NotImplementedError(f'{weight_decomp} decomposition has not been implemented')
# Determine whether to use cuda or not
if weights.is_cuda:
device = 'cuda'
else:
device = 'cpu'
# Stack adjacency matrices (vertically/horizontally)
adj_indices, adj_size = stack_matrices(
triples,
num_nodes,
num_relations,
vertical_stacking=vertical_stacking,
device=device
)
num_triples = adj_indices.size(0)
vals = torch.ones(num_triples, dtype=torch.float, device=device)
        # Apply normalisation (vertical stacking -> row-wise sum & horizontal stacking -> column-wise sum)
sums = sum_sparse(adj_indices, vals, adj_size, row_normalisation=vertical_stacking, device=device)
if not vertical_stacking:
# Rearrange column-wise normalised value to reflect original order (because of transpose-trick)
n = (len(vals) - num_nodes) // 2
sums = torch.cat([sums[n:2 * n], sums[:n], sums[2 * n:]], dim=0)
vals = vals / sums
# Construct adjacency matrix
if device == 'cuda':
adj = torch.cuda.sparse.FloatTensor(indices=adj_indices.t(), values=vals, size=adj_size)
else:
adj = torch.sparse.FloatTensor(indices=adj_indices.t(), values=vals, size=adj_size)
assert weights.size() == (num_relations, in_dim, out_dim)
if self.in_features is None:
# Featureless
output = torch.mm(adj, weights.view(num_relations * in_dim, out_dim))
elif self.vertical_stacking:
# Adjacency matrix vertically stacked
af = torch.spmm(adj, features)
af = af.view(self.num_relations, self.num_nodes, in_dim)
output = torch.einsum('rio, rni -> no', weights, af)
else:
# Adjacency matrix horizontally stacked
fw = torch.einsum('ni, rio -> rno', features, weights).contiguous()
output = torch.mm(adj, fw.view(self.num_relations * self.num_nodes, out_dim))
assert output.size() == (self.num_nodes, out_dim)
if self.bias is not None:
output = torch.add(output, self.bias)
return output
|
torch_rgvae/layers/RGC_layers.py
|
from utils.utils import block_diag, stack_matrices, sum_sparse
from torch.nn.modules.module import Module
from torch.nn.parameter import Parameter
from torch import nn
import math
import torch
class RelationalGraphConvolution(Module):
""" Relational Graph Convolution (RGC) Layer (as described in https://arxiv.org/abs/1703.06103)"""
def __init__(self,
triples=None,
num_nodes=None,
num_relations=None,
in_features=None,
out_features=None,
edge_dropout=None,
edge_dropout_self_loop=None,
bias=True,
decomposition=None,
vertical_stacking=False,
reset_mode='xavier'):
super(RelationalGraphConvolution, self).__init__()
        assert (num_nodes is not None and num_relations is not None and out_features is not None), \
            "The following must be specified: number of nodes, number of relations and output dimension!"
# If featureless, use number of nodes instead as input dimension
in_dim = in_features if in_features is not None else num_nodes
out_dim = out_features
# Unpack arguments
weight_decomp = decomposition['type'] if decomposition is not None and 'type' in decomposition else None
num_bases = decomposition['num_bases'] if decomposition is not None and 'num_bases' in decomposition else None
num_blocks = decomposition['num_blocks'] if decomposition is not None and 'num_blocks' in decomposition else None
self.triples = triples
self.num_nodes = num_nodes
self.num_relations = num_relations
self.in_features = in_features
self.out_features = out_features
self.weight_decomp = weight_decomp
self.num_bases = num_bases
self.num_blocks = num_blocks
self.vertical_stacking = vertical_stacking
self.edge_dropout = edge_dropout
self.edge_dropout_self_loop = edge_dropout_self_loop
# Instantiate weights
if self.weight_decomp is None:
self.weights = Parameter(torch.FloatTensor(num_relations, in_dim, out_dim))
elif self.weight_decomp == 'basis':
# Weight Regularisation through Basis Decomposition
assert num_bases > 0, \
'Number of bases should be set to higher than zero for basis decomposition!'
self.bases = Parameter(torch.FloatTensor(num_bases, in_dim, out_dim))
self.comps = Parameter(torch.FloatTensor(num_relations, num_bases))
elif self.weight_decomp == 'block':
# Weight Regularisation through Block Diagonal Decomposition
assert self.num_blocks > 0, \
'Number of blocks should be set to a value higher than zero for block diagonal decomposition!'
assert in_dim % self.num_blocks == 0 and out_dim % self.num_blocks == 0,\
f'For block diagonal decomposition, input dimensions ({in_dim}, {out_dim}) must be divisible ' \
f'by number of blocks ({self.num_blocks})'
self.blocks = nn.Parameter(
torch.FloatTensor(num_relations, self.num_blocks, in_dim // self.num_blocks, out_dim // self.num_blocks))
else:
raise NotImplementedError(f'{self.weight_decomp} decomposition has not been implemented')
# Instantiate biases
if bias:
self.bias = Parameter(torch.FloatTensor(out_features))
else:
self.register_parameter('bias', None)
self.reset_parameters(reset_mode)
def reset_parameters(self, reset_mode='xavier'):
""" Initialise biases and weights (xavier or uniform) """
if reset_mode == 'xavier':
if self.weight_decomp == 'block':
nn.init.xavier_uniform_(self.blocks, gain=nn.init.calculate_gain('relu'))
elif self.weight_decomp == 'basis':
nn.init.xavier_uniform_(self.bases, gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self.comps, gain=nn.init.calculate_gain('relu'))
else:
nn.init.xavier_uniform_(self.weights, gain=nn.init.calculate_gain('relu'))
if self.bias is not None:
torch.nn.init.zeros_(self.bias)
elif reset_mode == 'uniform':
stdv = 1.0 / math.sqrt(self.weights.size(1))
if self.weight_decomp == 'block':
self.blocks.data.uniform_(-stdv, stdv)
elif self.weight_decomp == 'basis':
self.bases.data.uniform_(-stdv, stdv)
self.comps.data.uniform_(-stdv, stdv)
else:
self.weights.data.uniform_(-stdv, stdv)
if self.bias is not None:
self.bias.data.uniform_(-stdv, stdv)
else:
raise NotImplementedError(f'{reset_mode} parameter initialisation method has not been implemented')
def forward(self, triples, features=None):
""" Perform a single pass of message propagation """
assert (features is None) == (self.in_features is None), \
"Layer has not been properly configured to take in features!"
in_dim = self.in_features if self.in_features is not None else self.num_nodes
# triples = self.triples
out_dim = self.out_features
edge_dropout = self.edge_dropout
weight_decomp = self.weight_decomp
num_nodes = self.num_nodes
num_relations = self.num_relations
vertical_stacking = self.vertical_stacking
# Apply edge dropout
if edge_dropout is not None and self.training:
assert 'general' in edge_dropout and 'self_loop' in edge_dropout, \
'General and self-loop edge dropouts must be specified!'
assert type(edge_dropout['general']) is float and 0.0 <= edge_dropout['general'] <= 1.0, \
"Edge dropout rates must between 0.0 and 1.0!"
general_edo = edge_dropout['general']
self_loop_edo = edge_dropout['self_loop']
triples = drop_edges(triples, num_nodes, general_edo, self_loop_edo)
# Choose weights
if weight_decomp is None:
weights = self.weights
elif weight_decomp == 'basis':
weights = torch.einsum('rb, bio -> rio', self.comps, self.bases)
elif weight_decomp == 'block':
weights = block_diag(self.blocks)
else:
raise NotImplementedError(f'{weight_decomp} decomposition has not been implemented')
# Determine whether to use cuda or not
if weights.is_cuda:
device = 'cuda'
else:
device = 'cpu'
# Stack adjacency matrices (vertically/horizontally)
adj_indices, adj_size = stack_matrices(
triples,
num_nodes,
num_relations,
vertical_stacking=vertical_stacking,
device=device
)
num_triples = adj_indices.size(0)
vals = torch.ones(num_triples, dtype=torch.float, device=device)
        # Apply normalisation (vertical stacking -> row-wise sum & horizontal stacking -> column-wise sum)
sums = sum_sparse(adj_indices, vals, adj_size, row_normalisation=vertical_stacking, device=device)
if not vertical_stacking:
# Rearrange column-wise normalised value to reflect original order (because of transpose-trick)
n = (len(vals) - num_nodes) // 2
sums = torch.cat([sums[n:2 * n], sums[:n], sums[2 * n:]], dim=0)
vals = vals / sums
# Construct adjacency matrix
if device == 'cuda':
adj = torch.cuda.sparse.FloatTensor(indices=adj_indices.t(), values=vals, size=adj_size)
else:
adj = torch.sparse.FloatTensor(indices=adj_indices.t(), values=vals, size=adj_size)
assert weights.size() == (num_relations, in_dim, out_dim)
if self.in_features is None:
# Featureless
output = torch.mm(adj, weights.view(num_relations * in_dim, out_dim))
elif self.vertical_stacking:
# Adjacency matrix vertically stacked
af = torch.spmm(adj, features)
af = af.view(self.num_relations, self.num_nodes, in_dim)
output = torch.einsum('rio, rni -> no', weights, af)
else:
# Adjacency matrix horizontally stacked
fw = torch.einsum('ni, rio -> rno', features, weights).contiguous()
output = torch.mm(adj, fw.view(self.num_relations * self.num_nodes, out_dim))
assert output.size() == (self.num_nodes, out_dim)
if self.bias is not None:
output = torch.add(output, self.bias)
return output
| 0.917052 | 0.622517 |
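A sketch of instantiating the layer above with basis decomposition in featureless mode. The dimensions and the toy triples are invented; the constructor keywords, the decomposition dict and the forward(triples) call mirror the code.

import torch
from torch_rgvae.layers.RGC_layers import RelationalGraphConvolution

num_nodes, num_relations, hidden = 100, 4, 16

layer = RelationalGraphConvolution(
    num_nodes=num_nodes,
    num_relations=num_relations,
    in_features=None,                         # featureless: node identity acts as the input
    out_features=hidden,
    decomposition={'type': 'basis', 'num_bases': 2},
    vertical_stacking=False,                  # the featureless branch multiplies a horizontally stacked adjacency
)

# (subject, relation, object) index triples, shape (num_triples, 3)
triples = torch.tensor([[0, 1, 2], [2, 0, 3], [3, 2, 0]])
out = layer(triples)                          # expected shape: (num_nodes, hidden)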
import logging
from rackattack.physical import logconfig
from rackattack.ssh import connection
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
import time
import argparse
from rackattack.physical import config
from rackattack.physical import network
from rackattack.physical import dynamicconfig
import rackattack.virtual.handlekill
from rackattack.common import dnsmasq
from rackattack.common import globallock
from rackattack.common import tftpboot
from rackattack.common import inaugurate
from rackattack.common import timer
from rackattack.common import hosts
from rackattack.physical.alloc import freepool
from rackattack.physical.alloc import allocations
from rackattack.physical import ipcserver
from rackattack.tcp import publish
from rackattack.tcp import transportserver
from twisted.internet import reactor
from twisted.web import server
from twisted.python import log
from rackattack.common import httprootresource
import inaugurator.server.config
import yaml
import sys
parser = argparse.ArgumentParser()
parser.add_argument("--requestPort", default=1014, type=int)
parser.add_argument("--subscribePort", default=1015, type=int)
parser.add_argument("--httpPort", default=1016, type=int)
parser.add_argument("--rackYAML")
parser.add_argument("--serialLogsDirectory")
parser.add_argument("--managedPostMortemPacksDirectory")
parser.add_argument("--configurationFile")
args = parser.parse_args()
if args.rackYAML:
config.RACK_YAML = args.rackYAML
if args.serialLogsDirectory:
config.SERIAL_LOGS_DIRECTORY = args.serialLogsDirectory
if args.configurationFile:
config.CONFIGURATION_FILE = args.configurationFile
if args.managedPostMortemPacksDirectory:
config.MANAGED_POST_MORTEM_PACKS_DIRECTORY = args.managedPostMortemPacksDirectory
with open(config.CONFIGURATION_FILE) as f:
    conf = yaml.safe_load(f.read())
network.setGatewayIP(conf['GATEWAY_IP'])
network.setUpStaticPortForwardingForSSH(conf['PUBLIC_INTERFACE'])
timer.TimersThread()
tftpbootInstance = tftpboot.TFTPBoot(
netmask=network.NETMASK,
inauguratorServerIP=network.BOOTSERVER_IP_ADDRESS,
inauguratorServerPort=inaugurator.server.config.PORT,
inauguratorGatewayIP=network.GATEWAY_IP_ADDRESS,
osmosisServerIP=conf['OSMOSIS_SERVER_IP'],
rootPassword=config.ROOT_PASSWORD,
withLocalObjectStore=True)
dnsmasq.DNSMasq.eraseLeasesFile()
dnsmasq.DNSMasq.killAllPrevious()
dnsmasqInstance = dnsmasq.DNSMasq(
tftpboot=tftpbootInstance,
serverIP=network.BOOTSERVER_IP_ADDRESS,
netmask=network.NETMASK,
firstIP=network.FIRST_IP,
lastIP=network.LAST_IP,
gateway=network.GATEWAY_IP_ADDRESS,
nameserver=network.BOOTSERVER_IP_ADDRESS)
inaugurateInstance = inaugurate.Inaugurate(config.RABBIT_MQ_DIRECTORY)
publishInstance = publish.Publish("ampq://localhost:%d/%%2F" % inaugurator.server.config.PORT)
hostsInstance = hosts.Hosts()
freePool = freepool.FreePool(hostsInstance)
allocationsInstance = allocations.Allocations(
broadcaster=publishInstance, hosts=hostsInstance, freePool=freePool,
osmosisServer=conf['OSMOSIS_SERVER_IP'])
dynamicConfig = dynamicconfig.DynamicConfig(
hosts=hostsInstance,
dnsmasq=dnsmasqInstance,
inaugurate=inaugurateInstance,
tftpboot=tftpbootInstance,
freePool=freePool,
allocations=allocationsInstance)
ipcServer = ipcserver.IPCServer(
publicNATIP=conf['PUBLIC_NAT_IP'],
osmosisServerIP=conf['OSMOSIS_SERVER_IP'],
dnsmasq=dnsmasqInstance,
allocations=allocationsInstance,
hosts=hostsInstance,
dynamicConfig=dynamicConfig)
def serialLogFilename(vmID):
with globallock.lock():
return hostsInstance.byID(vmID).hostImplementation().serialLogFilename()
def createPostMortemPackForAllocationID(allocationID):
with globallock.lock():
return allocationsInstance.byIndex(int(allocationID)).createPostMortemPack()
log.startLogging(sys.stderr)
root = httprootresource.HTTPRootResource(
serialLogFilename, createPostMortemPackForAllocationID,
config.MANAGED_POST_MORTEM_PACKS_DIRECTORY)
reactor.listenTCP(args.httpPort, server.Site(root))
reactor.listenTCP(args.requestPort, transportserver.TransportFactory(ipcServer.handle))
logging.info("Physical RackAttack up and running")
reactor.run()
|
rackattack/physical/main.py
|
import logging
from rackattack.physical import logconfig
from rackattack.ssh import connection
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
import time
import argparse
from rackattack.physical import config
from rackattack.physical import network
from rackattack.physical import dynamicconfig
import rackattack.virtual.handlekill
from rackattack.common import dnsmasq
from rackattack.common import globallock
from rackattack.common import tftpboot
from rackattack.common import inaugurate
from rackattack.common import timer
from rackattack.common import hosts
from rackattack.physical.alloc import freepool
from rackattack.physical.alloc import allocations
from rackattack.physical import ipcserver
from rackattack.tcp import publish
from rackattack.tcp import transportserver
from twisted.internet import reactor
from twisted.web import server
from twisted.python import log
from rackattack.common import httprootresource
import inaugurator.server.config
import yaml
import sys
parser = argparse.ArgumentParser()
parser.add_argument("--requestPort", default=1014, type=int)
parser.add_argument("--subscribePort", default=1015, type=int)
parser.add_argument("--httpPort", default=1016, type=int)
parser.add_argument("--rackYAML")
parser.add_argument("--serialLogsDirectory")
parser.add_argument("--managedPostMortemPacksDirectory")
parser.add_argument("--configurationFile")
args = parser.parse_args()
if args.rackYAML:
config.RACK_YAML = args.rackYAML
if args.serialLogsDirectory:
config.SERIAL_LOGS_DIRECTORY = args.serialLogsDirectory
if args.configurationFile:
config.CONFIGURATION_FILE = args.configurationFile
if args.managedPostMortemPacksDirectory:
config.MANAGED_POST_MORTEM_PACKS_DIRECTORY = args.managedPostMortemPacksDirectory
with open(config.CONFIGURATION_FILE) as f:
    conf = yaml.safe_load(f.read())
network.setGatewayIP(conf['GATEWAY_IP'])
network.setUpStaticPortForwardingForSSH(conf['PUBLIC_INTERFACE'])
timer.TimersThread()
tftpbootInstance = tftpboot.TFTPBoot(
netmask=network.NETMASK,
inauguratorServerIP=network.BOOTSERVER_IP_ADDRESS,
inauguratorServerPort=inaugurator.server.config.PORT,
inauguratorGatewayIP=network.GATEWAY_IP_ADDRESS,
osmosisServerIP=conf['OSMOSIS_SERVER_IP'],
rootPassword=config.ROOT_PASSWORD,
withLocalObjectStore=True)
dnsmasq.DNSMasq.eraseLeasesFile()
dnsmasq.DNSMasq.killAllPrevious()
dnsmasqInstance = dnsmasq.DNSMasq(
tftpboot=tftpbootInstance,
serverIP=network.BOOTSERVER_IP_ADDRESS,
netmask=network.NETMASK,
firstIP=network.FIRST_IP,
lastIP=network.LAST_IP,
gateway=network.GATEWAY_IP_ADDRESS,
nameserver=network.BOOTSERVER_IP_ADDRESS)
inaugurateInstance = inaugurate.Inaugurate(config.RABBIT_MQ_DIRECTORY)
publishInstance = publish.Publish("ampq://localhost:%d/%%2F" % inaugurator.server.config.PORT)
hostsInstance = hosts.Hosts()
freePool = freepool.FreePool(hostsInstance)
allocationsInstance = allocations.Allocations(
broadcaster=publishInstance, hosts=hostsInstance, freePool=freePool,
osmosisServer=conf['OSMOSIS_SERVER_IP'])
dynamicConfig = dynamicconfig.DynamicConfig(
hosts=hostsInstance,
dnsmasq=dnsmasqInstance,
inaugurate=inaugurateInstance,
tftpboot=tftpbootInstance,
freePool=freePool,
allocations=allocationsInstance)
ipcServer = ipcserver.IPCServer(
publicNATIP=conf['PUBLIC_NAT_IP'],
osmosisServerIP=conf['OSMOSIS_SERVER_IP'],
dnsmasq=dnsmasqInstance,
allocations=allocationsInstance,
hosts=hostsInstance,
dynamicConfig=dynamicConfig)
def serialLogFilename(vmID):
with globallock.lock():
return hostsInstance.byID(vmID).hostImplementation().serialLogFilename()
def createPostMortemPackForAllocationID(allocationID):
with globallock.lock():
return allocationsInstance.byIndex(int(allocationID)).createPostMortemPack()
log.startLogging(sys.stderr)
root = httprootresource.HTTPRootResource(
serialLogFilename, createPostMortemPackForAllocationID,
config.MANAGED_POST_MORTEM_PACKS_DIRECTORY)
reactor.listenTCP(args.httpPort, server.Site(root))
reactor.listenTCP(args.requestPort, transportserver.TransportFactory(ipcServer.handle))
logging.info("Physical RackAttack up and running")
reactor.run()
| 0.32306 | 0.045058 |
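A hedged sketch of the YAML configuration file the script above reads via config.CONFIGURATION_FILE. Only the four keys the code actually accesses (GATEWAY_IP, PUBLIC_INTERFACE, OSMOSIS_SERVER_IP, PUBLIC_NAT_IP) are shown; the addresses and the output path are placeholders.

import yaml

conf = {
    'GATEWAY_IP': '192.168.1.1',
    'PUBLIC_INTERFACE': 'eth0',
    'OSMOSIS_SERVER_IP': '192.168.1.2',
    'PUBLIC_NAT_IP': '10.0.0.1',
}

with open('/tmp/rackattack.physical.conf', 'w') as f:   # path is an assumption
    yaml.safe_dump(conf, f, default_flow_style=False)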
import datetime
STRING_UNAVAILABLE = "spaceapi: N/A"
class Py3status:
"""
"""
# available configuration parameters
button_url = 3
cache_timeout = 60
format = "{state}[ {lastchanged}]"
format_lastchanged = "since %H:%M"
state_closed = "closed"
state_open = "open"
url = "https://status.chaospott.de/status.json"
class Meta:
deprecated = {
"rename": [
{
"param": "open_color",
"new": "color_open",
"msg": "obsolete parameter use `color_open`",
},
{
"param": "closed_color",
"new": "color_closed",
"msg": "obsolete parameter use `color_closed`",
},
{
"param": "closed_text",
"new": "state_closed",
"msg": "obsolete parameter use `state_closed`",
},
{
"param": "open_text",
"new": "state_open",
"msg": "obsolete parameter use `state_open`",
},
{
"param": "time_text",
"new": "format_lastchanged",
"msg": "obsolete parameter use `format_lastchanged`",
},
]
}
def post_config_hook(self):
        self.button_refresh = 2
        self._url = None  # set on the first successful spaceapi() call; guards on_click()
self.color_open = self.py3.COLOR_OPEN or self.py3.COLOR_GOOD
self.color_closed = self.py3.COLOR_CLOSED or self.py3.COLOR_BAD
def spaceapi(self):
color = self.color_closed
state = self.state_closed
lastchanged = "unknown"
try:
data = self.py3.request(self.url).json()
self._url = data.get("url")
if data["state"]["open"]:
color = self.color_open
state = self.state_open
if "lastchange" in data["state"].keys():
try:
dt = datetime.datetime.fromtimestamp(data["state"]["lastchange"])
lastchanged = dt.strftime(self.format_lastchanged)
except TypeError:
pass
full_text = self.py3.safe_format(
self.format, {"state": state, "lastchanged": lastchanged}
)
except (self.py3.RequestException, KeyError):
full_text = STRING_UNAVAILABLE
return {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": full_text,
"color": color,
}
def on_click(self, event):
button = event["button"]
if self._url and self.button_url == button:
self.py3.command_run("xdg-open {}".format(self._url))
self.py3.prevent_refresh()
elif button != self.button_refresh:
self.py3.prevent_refresh()
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
py3status/modules/spaceapi.py
|
import datetime
STRING_UNAVAILABLE = "spaceapi: N/A"
class Py3status:
"""
"""
# available configuration parameters
button_url = 3
cache_timeout = 60
format = "{state}[ {lastchanged}]"
format_lastchanged = "since %H:%M"
state_closed = "closed"
state_open = "open"
url = "https://status.chaospott.de/status.json"
class Meta:
deprecated = {
"rename": [
{
"param": "open_color",
"new": "color_open",
"msg": "obsolete parameter use `color_open`",
},
{
"param": "closed_color",
"new": "color_closed",
"msg": "obsolete parameter use `color_closed`",
},
{
"param": "closed_text",
"new": "state_closed",
"msg": "obsolete parameter use `state_closed`",
},
{
"param": "open_text",
"new": "state_open",
"msg": "obsolete parameter use `state_open`",
},
{
"param": "time_text",
"new": "format_lastchanged",
"msg": "obsolete parameter use `format_lastchanged`",
},
]
}
def post_config_hook(self):
        self.button_refresh = 2
        self._url = None  # set on the first successful spaceapi() call; guards on_click()
self.color_open = self.py3.COLOR_OPEN or self.py3.COLOR_GOOD
self.color_closed = self.py3.COLOR_CLOSED or self.py3.COLOR_BAD
def spaceapi(self):
color = self.color_closed
state = self.state_closed
lastchanged = "unknown"
try:
data = self.py3.request(self.url).json()
self._url = data.get("url")
if data["state"]["open"]:
color = self.color_open
state = self.state_open
if "lastchange" in data["state"].keys():
try:
dt = datetime.datetime.fromtimestamp(data["state"]["lastchange"])
lastchanged = dt.strftime(self.format_lastchanged)
except TypeError:
pass
full_text = self.py3.safe_format(
self.format, {"state": state, "lastchanged": lastchanged}
)
except (self.py3.RequestException, KeyError):
full_text = STRING_UNAVAILABLE
return {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": full_text,
"color": color,
}
def on_click(self, event):
button = event["button"]
if self._url and self.button_url == button:
self.py3.command_run("xdg-open {}".format(self._url))
self.py3.prevent_refresh()
elif button != self.button_refresh:
self.py3.prevent_refresh()
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| 0.426799 | 0.278045 |
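For illustration, the SpaceAPI fields the module above actually consumes are state.open, state.lastchange and the top-level url; the sample values below are invented.

import datetime

sample_payload = {
    'url': 'https://example-hackerspace.org',
    'state': {
        'open': True,
        'lastchange': 1700000000,   # unix timestamp, passed to datetime.fromtimestamp()
    },
}

is_open = sample_payload['state']['open']
since = datetime.datetime.fromtimestamp(sample_payload['state']['lastchange'])
print('open' if is_open else 'closed', since.strftime('since %H:%M'))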
from odoo import models, fields, api, tools, _
from odoo.http import request, content_disposition
from odoo.addons.hs_query.libs.query_libs import query_and_count_data, get_query_statement_by_code
import json
import traceback
ERROR_NO_STATEMENT_CODE = u"数据库查询代码[ %s ]不存在, 请联系管理员!!"
ERROR_SQL_QUERY = u"数据库查询异常, 请联系管理员!!<br/><br/> %s"
class QueryAdapter(models.TransientModel):
_name = 'hs.query.adapter'
def query_data(self, *args, **kwargs):
        # values passed in via the JSON-RPC request
query_condition = request.jsonrequest['context'].get('query_condition', {}) or {}
_statement_code = request.jsonrequest['context']['_statement_code']
query = get_query_statement_by_code(request.env, _statement_code)
if not query:
return {
'error': 1,
'msg': ERROR_NO_STATEMENT_CODE % _statement_code,
}
sql = query.statement or ''
wizard_name = query.wizard_name or ''
        page = request.jsonrequest['page']  # page number
        page_size = request.jsonrequest['pagesize']  # rows per page
        # run the query; return an error payload if anything goes wrong
try:
sql = request.env[wizard_name].format_sql_by_condition(sql, query_condition)
data = query_and_count_data(self.env, sql, page, page_size, query.get_columns())
        except Exception as e:
print(traceback.format_exc())
data = {'error': 1, 'msg': ERROR_SQL_QUERY % str(e)}
return data
def query_download(self, statement_code):
wizard = self.env['query.select.wizard.parent']
download_data = wizard._generate_download_data(statement_code=statement_code)
xls_name = download_data['xls_name']
base_data = download_data['base_data']
query_id = download_data['query_id']
wizard.create_download_file(xls_name, base_data, query_id)
return request.make_response(
base_data,
headers=[
('Content-Disposition', content_disposition(xls_name)),
('Content-Type', 'application/octet-stream')],
)
|
hs_query/models/query_adapter.py
|
from odoo import models, fields, api, tools, _
from odoo.http import request, content_disposition
from odoo.addons.hs_query.libs.query_libs import query_and_count_data, get_query_statement_by_code
import json
import traceback
ERROR_NO_STATEMENT_CODE = u"数据库查询代码[ %s ]不存在, 请联系管理员!!"
ERROR_SQL_QUERY = u"数据库查询异常, 请联系管理员!!<br/><br/> %s"
class QueryAdapter(models.TransientModel):
_name = 'hs.query.adapter'
def query_data(self, *args, **kwargs):
        # values passed in via the JSON-RPC request
query_condition = request.jsonrequest['context'].get('query_condition', {}) or {}
_statement_code = request.jsonrequest['context']['_statement_code']
query = get_query_statement_by_code(request.env, _statement_code)
if not query:
return {
'error': 1,
'msg': ERROR_NO_STATEMENT_CODE % _statement_code,
}
sql = query.statement or ''
wizard_name = query.wizard_name or ''
        page = request.jsonrequest['page']  # page number
        page_size = request.jsonrequest['pagesize']  # rows per page
        # run the query; return an error payload if anything goes wrong
try:
sql = request.env[wizard_name].format_sql_by_condition(sql, query_condition)
data = query_and_count_data(self.env, sql, page, page_size, query.get_columns())
        except Exception as e:
print(traceback.format_exc())
data = {'error': 1, 'msg': ERROR_SQL_QUERY % str(e)}
return data
def query_download(self, statement_code):
wizard = self.env['query.select.wizard.parent']
download_data = wizard._generate_download_data(statement_code=statement_code)
xls_name = download_data['xls_name']
base_data = download_data['base_data']
query_id = download_data['query_id']
wizard.create_download_file(xls_name, base_data, query_id)
return request.make_response(
base_data,
headers=[
('Content-Disposition', content_disposition(xls_name)),
('Content-Type', 'application/octet-stream')],
)
| 0.283285 | 0.087525 |
import atexit
import os
import random
import time
import traceback
from mpire.pool import WorkerPool
import pickle
from ditk import logging
import tempfile
import socket
from os import path
from typing import Callable, Dict, List, Optional, Tuple, Union, Set
from threading import Thread
from ding.framework.event_loop import EventLoop
from ding.utils.design_helper import SingletonMetaclass
from ding.framework.message_queue import *
from ding.utils.registry_factory import MQ_REGISTRY
# Avoid ipc address conflicts: use a dedicated Random instance so it always gets its own random seed
random = random.Random()
class Parallel(metaclass=SingletonMetaclass):
def __init__(self) -> None:
# Init will only be called once in a process
self._listener = None
self.is_active = False
self.node_id = None
self.labels = set()
self._event_loop = EventLoop("parallel_{}".format(id(self)))
self._retries = 0 # Retries in auto recovery
def _run(
self,
node_id: int,
labels: Optional[Set[str]] = None,
auto_recover: bool = False,
max_retries: int = float("inf"),
mq_type: str = "nng",
**kwargs
) -> None:
self.node_id = node_id
self.labels = labels or set()
self.auto_recover = auto_recover
self.max_retries = max_retries
self._mq = MQ_REGISTRY.get(mq_type)(**kwargs)
self._listener = Thread(target=self.listen, name="mq_listener", daemon=True)
self._listener.start()
@classmethod
def runner(
cls,
n_parallel_workers: int,
mq_type: str = "nng",
attach_to: Optional[List[str]] = None,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None,
topology: str = "mesh",
labels: Optional[Set[str]] = None,
node_ids: Optional[Union[List[int], int]] = None,
auto_recover: bool = False,
max_retries: int = float("inf"),
redis_host: Optional[str] = None,
redis_port: Optional[int] = None
) -> Callable:
"""
Overview:
This method allows you to configure parallel parameters, and now you are still in the parent process.
Arguments:
- n_parallel_workers (:obj:`int`): Workers to spawn.
- mq_type (:obj:`str`): Embedded message queue type, i.e. nng, redis.
- attach_to (:obj:`Optional[List[str]]`): The node's addresses you want to attach to.
- protocol (:obj:`str`): Network protocol.
- address (:obj:`Optional[str]`): Bind address, ip or file path.
- ports (:obj:`Optional[List[int]]`): Candidate ports.
- topology (:obj:`str`): Network topology, includes:
`mesh` (default): fully connected between each other;
`star`: only connect to the first node;
`alone`: do not connect to any node, except the node attached to;
- labels (:obj:`Optional[Set[str]]`): Labels.
- node_ids (:obj:`Optional[List[int]]`): Candidate node ids.
- auto_recover (:obj:`bool`): Auto recover from uncaught exceptions from main.
- max_retries (:obj:`int`): Max retries for auto recover.
- redis_host (:obj:`str`): Redis server host.
- redis_port (:obj:`int`): Redis server port.
Returns:
- _runner (:obj:`Callable`): The wrapper function for main.
"""
all_args = locals()
del all_args["cls"]
args_parsers = {"nng": cls._nng_args_parser, "redis": cls._redis_args_parser}
        assert n_parallel_workers > 0, "Parallel worker number should be greater than 0"
def _runner(main_process: Callable, *args, **kwargs) -> None:
"""
Overview:
Prepare to run in subprocess.
Arguments:
- main_process (:obj:`Callable`): The main function, your program start from here.
"""
runner_params = args_parsers[mq_type](**all_args)
params_group = [[runner_kwargs, (main_process, args, kwargs)] for runner_kwargs in runner_params]
if n_parallel_workers == 1:
cls._subprocess_runner(*params_group[0])
else:
with WorkerPool(n_jobs=n_parallel_workers, start_method="spawn", daemon=False) as pool:
# Cleanup the pool just in case the program crashes.
atexit.register(pool.__exit__)
pool.map(cls._subprocess_runner, params_group)
return _runner
@classmethod
def _nng_args_parser(
cls,
n_parallel_workers: int,
attach_to: Optional[List[str]] = None,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None,
topology: str = "mesh",
node_ids: Optional[Union[List[int], int]] = None,
**kwargs
    ) -> List[dict]:
attach_to = attach_to or []
nodes = cls.get_node_addrs(n_parallel_workers, protocol=protocol, address=address, ports=ports)
logging.info("Bind subprocesses on these addresses: {}".format(nodes))
def cleanup_nodes():
for node in nodes:
protocol, file_path = node.split("://")
if protocol == "ipc" and path.exists(file_path):
os.remove(file_path)
atexit.register(cleanup_nodes)
def topology_network(i: int) -> List[str]:
if topology == "mesh":
return nodes[:i] + attach_to
elif topology == "star":
return nodes[:min(1, i)] + attach_to
elif topology == "alone":
return attach_to
else:
raise ValueError("Unknown topology: {}".format(topology))
runner_params = []
candidate_node_ids = cls.padding_param(node_ids, n_parallel_workers, 0)
for i in range(n_parallel_workers):
runner_kwargs = {
**kwargs,
"node_id": candidate_node_ids[i],
"listen_to": nodes[i],
"attach_to": topology_network(i),
}
runner_params.append(runner_kwargs)
return runner_params
@classmethod
def _redis_args_parser(cls, n_parallel_workers: int, node_ids: Optional[Union[List[int], int]] = None, **kwargs):
runner_params = []
candidate_node_ids = cls.padding_param(node_ids, n_parallel_workers, 0)
for i in range(n_parallel_workers):
runner_kwargs = {**kwargs, "node_id": candidate_node_ids[i]}
runner_params.append(runner_kwargs)
return runner_params
@classmethod
def _subprocess_runner(cls, runner_kwargs: dict, main_params: Tuple[Union[List, Dict]]) -> None:
"""
Overview:
Really run in subprocess.
Arguments:
            - runner_kwargs (:obj:`dict`): Keyword arguments for the runner.
- main_params (:obj:`Tuple[Union[List, Dict]]`): Args and kwargs for main function.
"""
main_process, args, kwargs = main_params
with Parallel() as router:
router.is_active = True
router._run(**runner_kwargs)
time.sleep(0.3) # Waiting for network pairing
router._supervised_runner(main_process, *args, **kwargs)
def _supervised_runner(self, main: Callable, *args, **kwargs) -> None:
"""
Overview:
Run in supervised mode.
Arguments:
- main (:obj:`Callable`): Main function.
"""
if self.auto_recover:
while True:
try:
main(*args, **kwargs)
break
except Exception as e:
if self._retries < self.max_retries:
logging.warning(
"Auto recover from exception: {}, node: {}, retries: {}".format(
e, self.node_id, self._retries
)
)
logging.warning(traceback.format_exc())
self._retries += 1
else:
logging.warning(
"Exceed the max retries, node: {}, retries: {}, max_retries: {}".format(
self.node_id, self._retries, self.max_retries
)
)
raise e
else:
main(*args, **kwargs)
@classmethod
def get_node_addrs(
cls,
n_workers: int,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None
    ) -> List[str]:
if protocol == "ipc":
node_name = "".join(random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=4))
tmp_dir = tempfile.gettempdir()
nodes = ["ipc://{}/ditask_{}_{}.ipc".format(tmp_dir, node_name, i) for i in range(n_workers)]
elif protocol == "tcp":
address = address or cls.get_ip()
ports = cls.padding_param(ports, n_workers, 50515)
assert len(ports) == n_workers, "The number of ports must be the same as the number of workers, \
now there are {} ports and {} workers".format(len(ports), n_workers)
nodes = ["tcp://{}:{}".format(address, port) for port in ports]
else:
raise Exception("Unknown protocol {}".format(protocol))
return nodes
@classmethod
def padding_param(cls, int_or_list: Optional[Union[List[int], int]], n_max: int, start_value: int) -> List[int]:
"""
Overview:
Padding int or list param to the length of n_max.
Arguments:
- int_or_list (:obj:`Optional[Union[List[int], int]]`): Int or list typed value.
- n_max (:obj:`int`): Max length.
- start_value (:obj:`int`): Start from value.
"""
param = int_or_list
if isinstance(param, List) and len(param) == 1:
param = param[0] # List with only 1 element is equal to int
if isinstance(param, int):
param = range(param, param + n_max)
else:
param = param or range(start_value, start_value + n_max)
return param
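    # Illustrative examples (not in the original file) of padding_param's behaviour:
    #
    #     Parallel.padding_param(None, 3, 50515)   -> range(50515, 50518)
    #     Parallel.padding_param(4, 3, 0)          -> range(4, 7)
    #     Parallel.padding_param([4], 3, 0)        -> range(4, 7)   (single-element list == int)
    #     Parallel.padding_param([7, 8, 9], 3, 0)  -> [7, 8, 9]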
def listen(self):
self._mq.listen()
while True:
msg = self._mq.recv()
            # A None msg means that the message queue is no longer being listened to,
            # e.g. because the message queue has already been closed
if not msg:
break
topic, msg = msg
self._handle_message(topic, msg)
def on(self, event: str, fn: Callable) -> None:
"""
Overview:
            Register a remote event on the parallel instance; this function will be executed \
            when a remote process emits this event via the network.
Arguments:
- event (:obj:`str`): Event name.
- fn (:obj:`Callable`): Function body.
"""
if self.is_active:
self._mq.subscribe(event)
self._event_loop.on(event, fn)
def once(self, event: str, fn: Callable) -> None:
"""
Overview:
            Register a remote event which will only be called once on the parallel instance;
            this function will be executed when a remote process emits this event via the network.
Arguments:
- event (:obj:`str`): Event name.
- fn (:obj:`Callable`): Function body.
"""
if self.is_active:
self._mq.subscribe(event)
self._event_loop.once(event, fn)
def off(self, event: str) -> None:
"""
Overview:
Unregister an event.
Arguments:
- event (:obj:`str`): Event name.
"""
if self.is_active:
self._mq.unsubscribe(event)
self._event_loop.off(event)
def emit(self, event: str, *args, **kwargs) -> None:
"""
Overview:
            Send a remote event via the network to subscribed processes.
Arguments:
- event (:obj:`str`): Event name.
"""
if self.is_active:
payload = {"a": args, "k": kwargs}
try:
data = pickle.dumps(payload, protocol=-1)
except AttributeError as e:
logging.error("Arguments are not pickable! Event: {}, Args: {}".format(event, args))
raise e
self._mq.publish(event, data)
def _handle_message(self, topic: str, msg: bytes) -> None:
"""
Overview:
Recv and parse payload from other processes, and call local functions.
Arguments:
            - topic (:obj:`str`): Received topic.
            - msg (:obj:`bytes`): Received message.
"""
event = topic
if not self._event_loop.listened(event):
logging.debug("Event {} was not listened in parallel {}".format(event, self.node_id))
return
try:
payload = pickle.loads(msg)
except Exception as e:
logging.error("Error when unpacking message on node {}, msg: {}".format(self.node_id, e))
return
self._event_loop.emit(event, *payload["a"], **payload["k"])
@classmethod
def get_ip(cls):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
IP = s.getsockname()[0]
except Exception:
IP = '127.0.0.1'
finally:
s.close()
return IP
def __enter__(self) -> "Parallel":
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
def stop(self):
logging.info("Stopping parallel worker on node: {}".format(self.node_id))
self.is_active = False
time.sleep(0.03)
if self._mq:
self._mq.stop()
self._mq = None
if self._listener:
self._listener.join(timeout=1)
self._listener = None
self._event_loop.stop()
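# Illustrative usage sketch (not part of the original file); `main` and the keyword
# values are made-up examples, only Parallel's public API above is assumed:
#
#     def main():
#         router = Parallel()
#         router.on("greeting", lambda msg: print(msg))
#         router.emit("greeting", "hello from node {}".format(router.node_id))
#
#     if __name__ == "__main__":
#         Parallel.runner(n_parallel_workers=2, protocol="ipc", topology="mesh")(main)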
|
ding/framework/parallel.py
|
import atexit
import os
import random
import time
import traceback
from mpire.pool import WorkerPool
import pickle
from ditk import logging
import tempfile
import socket
from os import path
from typing import Callable, Dict, List, Optional, Tuple, Union, Set
from threading import Thread
from ding.framework.event_loop import EventLoop
from ding.utils.design_helper import SingletonMetaclass
from ding.framework.message_queue import *
from ding.utils.registry_factory import MQ_REGISTRY
# Avoid ipc address conflicts: use a dedicated Random instance so it always gets its own random seed
random = random.Random()
class Parallel(metaclass=SingletonMetaclass):
def __init__(self) -> None:
# Init will only be called once in a process
self._listener = None
self.is_active = False
self.node_id = None
self.labels = set()
self._event_loop = EventLoop("parallel_{}".format(id(self)))
self._retries = 0 # Retries in auto recovery
def _run(
self,
node_id: int,
labels: Optional[Set[str]] = None,
auto_recover: bool = False,
max_retries: int = float("inf"),
mq_type: str = "nng",
**kwargs
) -> None:
self.node_id = node_id
self.labels = labels or set()
self.auto_recover = auto_recover
self.max_retries = max_retries
self._mq = MQ_REGISTRY.get(mq_type)(**kwargs)
self._listener = Thread(target=self.listen, name="mq_listener", daemon=True)
self._listener.start()
@classmethod
def runner(
cls,
n_parallel_workers: int,
mq_type: str = "nng",
attach_to: Optional[List[str]] = None,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None,
topology: str = "mesh",
labels: Optional[Set[str]] = None,
node_ids: Optional[Union[List[int], int]] = None,
auto_recover: bool = False,
max_retries: int = float("inf"),
redis_host: Optional[str] = None,
redis_port: Optional[int] = None
) -> Callable:
"""
Overview:
This method allows you to configure parallel parameters, and now you are still in the parent process.
Arguments:
- n_parallel_workers (:obj:`int`): Workers to spawn.
- mq_type (:obj:`str`): Embedded message queue type, i.e. nng, redis.
- attach_to (:obj:`Optional[List[str]]`): The node's addresses you want to attach to.
- protocol (:obj:`str`): Network protocol.
- address (:obj:`Optional[str]`): Bind address, ip or file path.
- ports (:obj:`Optional[List[int]]`): Candidate ports.
- topology (:obj:`str`): Network topology, includes:
`mesh` (default): fully connected between each other;
`star`: only connect to the first node;
`alone`: do not connect to any node, except the node attached to;
- labels (:obj:`Optional[Set[str]]`): Labels.
- node_ids (:obj:`Optional[List[int]]`): Candidate node ids.
- auto_recover (:obj:`bool`): Auto recover from uncaught exceptions from main.
- max_retries (:obj:`int`): Max retries for auto recover.
- redis_host (:obj:`str`): Redis server host.
- redis_port (:obj:`int`): Redis server port.
Returns:
- _runner (:obj:`Callable`): The wrapper function for main.
"""
all_args = locals()
del all_args["cls"]
args_parsers = {"nng": cls._nng_args_parser, "redis": cls._redis_args_parser}
        assert n_parallel_workers > 0, "Parallel worker number should be greater than 0"
def _runner(main_process: Callable, *args, **kwargs) -> None:
"""
Overview:
Prepare to run in subprocess.
Arguments:
- main_process (:obj:`Callable`): The main function, your program start from here.
"""
runner_params = args_parsers[mq_type](**all_args)
params_group = [[runner_kwargs, (main_process, args, kwargs)] for runner_kwargs in runner_params]
if n_parallel_workers == 1:
cls._subprocess_runner(*params_group[0])
else:
with WorkerPool(n_jobs=n_parallel_workers, start_method="spawn", daemon=False) as pool:
# Cleanup the pool just in case the program crashes.
atexit.register(pool.__exit__)
pool.map(cls._subprocess_runner, params_group)
return _runner
@classmethod
def _nng_args_parser(
cls,
n_parallel_workers: int,
attach_to: Optional[List[str]] = None,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None,
topology: str = "mesh",
node_ids: Optional[Union[List[int], int]] = None,
**kwargs
    ) -> List[dict]:
attach_to = attach_to or []
nodes = cls.get_node_addrs(n_parallel_workers, protocol=protocol, address=address, ports=ports)
logging.info("Bind subprocesses on these addresses: {}".format(nodes))
def cleanup_nodes():
for node in nodes:
protocol, file_path = node.split("://")
if protocol == "ipc" and path.exists(file_path):
os.remove(file_path)
atexit.register(cleanup_nodes)
def topology_network(i: int) -> List[str]:
if topology == "mesh":
return nodes[:i] + attach_to
elif topology == "star":
return nodes[:min(1, i)] + attach_to
elif topology == "alone":
return attach_to
else:
raise ValueError("Unknown topology: {}".format(topology))
runner_params = []
candidate_node_ids = cls.padding_param(node_ids, n_parallel_workers, 0)
for i in range(n_parallel_workers):
runner_kwargs = {
**kwargs,
"node_id": candidate_node_ids[i],
"listen_to": nodes[i],
"attach_to": topology_network(i),
}
runner_params.append(runner_kwargs)
return runner_params
@classmethod
def _redis_args_parser(cls, n_parallel_workers: int, node_ids: Optional[Union[List[int], int]] = None, **kwargs):
runner_params = []
candidate_node_ids = cls.padding_param(node_ids, n_parallel_workers, 0)
for i in range(n_parallel_workers):
runner_kwargs = {**kwargs, "node_id": candidate_node_ids[i]}
runner_params.append(runner_kwargs)
return runner_params
@classmethod
def _subprocess_runner(cls, runner_kwargs: dict, main_params: Tuple[Union[List, Dict]]) -> None:
"""
Overview:
Really run in subprocess.
Arguments:
            - runner_kwargs (:obj:`dict`): Keyword arguments for the runner.
- main_params (:obj:`Tuple[Union[List, Dict]]`): Args and kwargs for main function.
"""
main_process, args, kwargs = main_params
with Parallel() as router:
router.is_active = True
router._run(**runner_kwargs)
time.sleep(0.3) # Waiting for network pairing
router._supervised_runner(main_process, *args, **kwargs)
def _supervised_runner(self, main: Callable, *args, **kwargs) -> None:
"""
Overview:
Run in supervised mode.
Arguments:
- main (:obj:`Callable`): Main function.
"""
if self.auto_recover:
while True:
try:
main(*args, **kwargs)
break
except Exception as e:
if self._retries < self.max_retries:
logging.warning(
"Auto recover from exception: {}, node: {}, retries: {}".format(
e, self.node_id, self._retries
)
)
logging.warning(traceback.format_exc())
self._retries += 1
else:
logging.warning(
"Exceed the max retries, node: {}, retries: {}, max_retries: {}".format(
self.node_id, self._retries, self.max_retries
)
)
raise e
else:
main(*args, **kwargs)
@classmethod
def get_node_addrs(
cls,
n_workers: int,
protocol: str = "ipc",
address: Optional[str] = None,
ports: Optional[Union[List[int], int]] = None
    ) -> List[str]:
if protocol == "ipc":
node_name = "".join(random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=4))
tmp_dir = tempfile.gettempdir()
nodes = ["ipc://{}/ditask_{}_{}.ipc".format(tmp_dir, node_name, i) for i in range(n_workers)]
elif protocol == "tcp":
address = address or cls.get_ip()
ports = cls.padding_param(ports, n_workers, 50515)
assert len(ports) == n_workers, "The number of ports must be the same as the number of workers, \
now there are {} ports and {} workers".format(len(ports), n_workers)
nodes = ["tcp://{}:{}".format(address, port) for port in ports]
else:
raise Exception("Unknown protocol {}".format(protocol))
return nodes
@classmethod
def padding_param(cls, int_or_list: Optional[Union[List[int], int]], n_max: int, start_value: int) -> List[int]:
"""
Overview:
Padding int or list param to the length of n_max.
Arguments:
- int_or_list (:obj:`Optional[Union[List[int], int]]`): Int or list typed value.
- n_max (:obj:`int`): Max length.
- start_value (:obj:`int`): Start from value.
"""
param = int_or_list
if isinstance(param, List) and len(param) == 1:
param = param[0] # List with only 1 element is equal to int
if isinstance(param, int):
param = range(param, param + n_max)
else:
param = param or range(start_value, start_value + n_max)
return param
def listen(self):
self._mq.listen()
while True:
msg = self._mq.recv()
            # A None msg means that the message queue is no longer being listened to,
            # e.g. because the message queue has already been closed
if not msg:
break
topic, msg = msg
self._handle_message(topic, msg)
def on(self, event: str, fn: Callable) -> None:
"""
Overview:
            Register a remote event on the parallel instance; this function will be executed \
            when a remote process emits this event via the network.
Arguments:
- event (:obj:`str`): Event name.
- fn (:obj:`Callable`): Function body.
"""
if self.is_active:
self._mq.subscribe(event)
self._event_loop.on(event, fn)
def once(self, event: str, fn: Callable) -> None:
"""
Overview:
            Register a remote event which will only be called once on the parallel instance;
            this function will be executed when a remote process emits this event via the network.
Arguments:
- event (:obj:`str`): Event name.
- fn (:obj:`Callable`): Function body.
"""
if self.is_active:
self._mq.subscribe(event)
self._event_loop.once(event, fn)
def off(self, event: str) -> None:
"""
Overview:
Unregister an event.
Arguments:
- event (:obj:`str`): Event name.
"""
if self.is_active:
self._mq.unsubscribe(event)
self._event_loop.off(event)
def emit(self, event: str, *args, **kwargs) -> None:
"""
Overview:
            Send a remote event via the network to subscribed processes.
Arguments:
- event (:obj:`str`): Event name.
"""
if self.is_active:
payload = {"a": args, "k": kwargs}
try:
data = pickle.dumps(payload, protocol=-1)
except AttributeError as e:
logging.error("Arguments are not pickable! Event: {}, Args: {}".format(event, args))
raise e
self._mq.publish(event, data)
def _handle_message(self, topic: str, msg: bytes) -> None:
"""
Overview:
Recv and parse payload from other processes, and call local functions.
Arguments:
            - topic (:obj:`str`): Received topic.
            - msg (:obj:`bytes`): Received message.
"""
event = topic
if not self._event_loop.listened(event):
logging.debug("Event {} was not listened in parallel {}".format(event, self.node_id))
return
try:
payload = pickle.loads(msg)
except Exception as e:
logging.error("Error when unpacking message on node {}, msg: {}".format(self.node_id, e))
return
self._event_loop.emit(event, *payload["a"], **payload["k"])
@classmethod
def get_ip(cls):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
IP = s.getsockname()[0]
except Exception:
IP = '127.0.0.1'
finally:
s.close()
return IP
def __enter__(self) -> "Parallel":
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
def stop(self):
logging.info("Stopping parallel worker on node: {}".format(self.node_id))
self.is_active = False
time.sleep(0.03)
if self._mq:
self._mq.stop()
self._mq = None
if self._listener:
self._listener.join(timeout=1)
self._listener = None
self._event_loop.stop()
| 0.722527 | 0.207014 |
import sys
import typing
from collections import OrderedDict
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
def expand_tuples(L):
"""
>>> expand_tuples([1, (2, 3)])
[(1, 2), (1, 3)]
>>> expand_tuples([1, 2])
[(1, 2)]
"""
if not L:
return [()]
elif not isinstance(L[0], tuple):
rest = expand_tuples(L[1:])
return [(L[0],) + t for t in rest]
else:
rest = expand_tuples(L[1:])
return [(item,) + t for t in rest for item in L[0]]
# Taken from theano/theano/gof/sched.py
# Avoids licensing issues because this was written by <NAME>
def _toposort(edges):
""" Topological sort algorithm by Kahn [1] - O(nodes + vertices)
inputs:
edges - a dict of the form {a: {b, c}} where b and c depend on a
outputs:
L - an ordered list of nodes that satisfy the dependencies of edges
>>> _toposort({1: (2, 3), 2: (3, )})
[1, 2, 3]
Closely follows the wikipedia page [2]
[1] Kahn, <NAME>. (1962), "Topological sorting of large networks",
Communications of the ACM
[2] http://en.wikipedia.org/wiki/Toposort#Algorithms
"""
incoming_edges = reverse_dict(edges)
incoming_edges = OrderedDict((k, set(val)) for k, val in incoming_edges.items())
S = OrderedDict.fromkeys(v for v in edges if v not in incoming_edges)
L = []
while S:
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S[m] = None
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
return L
def reverse_dict(d):
"""
Reverses direction of dependence dict
>>> d = {'a': (1, 2), 'b': (2, 3), 'c':()}
>>> reverse_dict(d) # doctest: +SKIP
{1: ('a',), 2: ('a', 'b'), 3: ('b',)}
    :note: dict order is not deterministic. Since we iterate over the
    input dict, the output of this function depends on the dict order,
    so the output order should be considered nondeterministic.
"""
result = OrderedDict()
for key in d:
for val in d[key]:
result[val] = result.get(val, tuple()) + (key,)
return result
# Taken from toolz
# Avoids licensing issues because this version was authored by <NAME>
def groupby(func, seq):
""" Group a collection by a key function
>>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank']
>>> groupby(len, names) # doctest: +SKIP
{3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']}
>>> iseven = lambda x: x % 2 == 0
>>> groupby(iseven, [1, 2, 3, 4, 5, 6, 7, 8]) # doctest: +SKIP
{False: [1, 3, 5, 7], True: [2, 4, 6, 8]}
See Also:
``countby``
"""
d = OrderedDict()
for item in seq:
key = func(item)
if key not in d:
d[key] = list()
d[key].append(item)
return d
def typename(type):
"""
Get the name of `type`.
Parameters
----------
type : Union[Type, Tuple[Type]]
Returns
-------
str
The name of `type` or a tuple of the names of the types in `type`.
Examples
--------
>>> typename(int)
'int'
>>> typename((int, float))
'(int, float)'
"""
try:
return type.__name__
except AttributeError:
if len(type) == 1:
return typename(*type)
return "(%s)" % ", ".join(map(typename, type))
# parse typing.Union
def parse_union(ann):
if hasattr(typing, "UnionMeta"):
if type(ann) is not typing.UnionMeta:
return
return ann.__union_params__
elif hasattr(typing, "_Union"):
if type(ann) is not typing._Union:
return
return ann.__args__
elif hasattr(typing, "_GenericAlias"):
if type(ann) is not typing._GenericAlias:
if type(ann) is not typing.Union:
return
else:
if ann.__origin__ is not typing.Union:
return
return ann.__args__
elif hasattr(typing, "Union"):
if typing.get_origin(ann) is not typing.Union:
return
return typing.get_args(ann)
else:
raise NotImplementedError("unsupported Python version")
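# Illustrative sketch (not in the original file): each branch above is meant to give
# the same observable behaviour, e.g.
#
#     >>> parse_union(typing.Union[int, str])   # doctest: +SKIP
#     (int, str)
#     >>> parse_union(int) is None              # doctest: +SKIP
#     True
#
# Which branch actually runs depends on the typing internals of the interpreter,
# which is why the version probes above are needed.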
|
imperative/python/megengine/core/tensor/multipledispatch/utils.py
|
import sys
import typing
from collections import OrderedDict
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
def expand_tuples(L):
"""
>>> expand_tuples([1, (2, 3)])
[(1, 2), (1, 3)]
>>> expand_tuples([1, 2])
[(1, 2)]
"""
if not L:
return [()]
elif not isinstance(L[0], tuple):
rest = expand_tuples(L[1:])
return [(L[0],) + t for t in rest]
else:
rest = expand_tuples(L[1:])
return [(item,) + t for t in rest for item in L[0]]
# Taken from theano/theano/gof/sched.py
# Avoids licensing issues because this was written by <NAME>
def _toposort(edges):
""" Topological sort algorithm by Kahn [1] - O(nodes + vertices)
inputs:
edges - a dict of the form {a: {b, c}} where b and c depend on a
outputs:
L - an ordered list of nodes that satisfy the dependencies of edges
>>> _toposort({1: (2, 3), 2: (3, )})
[1, 2, 3]
Closely follows the wikipedia page [2]
[1] Kahn, <NAME>. (1962), "Topological sorting of large networks",
Communications of the ACM
[2] http://en.wikipedia.org/wiki/Toposort#Algorithms
"""
incoming_edges = reverse_dict(edges)
incoming_edges = OrderedDict((k, set(val)) for k, val in incoming_edges.items())
S = OrderedDict.fromkeys(v for v in edges if v not in incoming_edges)
L = []
while S:
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S[m] = None
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
return L
def reverse_dict(d):
"""
Reverses direction of dependence dict
>>> d = {'a': (1, 2), 'b': (2, 3), 'c':()}
>>> reverse_dict(d) # doctest: +SKIP
{1: ('a',), 2: ('a', 'b'), 3: ('b',)}
    :note: dict order is not deterministic. Since we iterate over the
    input dict, the output of this function depends on the dict order,
    so the output order should be considered nondeterministic.
"""
result = OrderedDict()
for key in d:
for val in d[key]:
result[val] = result.get(val, tuple()) + (key,)
return result
# Taken from toolz
# Avoids licensing issues because this version was authored by <NAME>
def groupby(func, seq):
""" Group a collection by a key function
>>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank']
>>> groupby(len, names) # doctest: +SKIP
{3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']}
>>> iseven = lambda x: x % 2 == 0
>>> groupby(iseven, [1, 2, 3, 4, 5, 6, 7, 8]) # doctest: +SKIP
{False: [1, 3, 5, 7], True: [2, 4, 6, 8]}
See Also:
``countby``
"""
d = OrderedDict()
for item in seq:
key = func(item)
if key not in d:
d[key] = list()
d[key].append(item)
return d
def typename(type):
"""
Get the name of `type`.
Parameters
----------
type : Union[Type, Tuple[Type]]
Returns
-------
str
The name of `type` or a tuple of the names of the types in `type`.
Examples
--------
>>> typename(int)
'int'
>>> typename((int, float))
'(int, float)'
"""
try:
return type.__name__
except AttributeError:
if len(type) == 1:
return typename(*type)
return "(%s)" % ", ".join(map(typename, type))
# parse typing.Union
def parse_union(ann):
if hasattr(typing, "UnionMeta"):
if type(ann) is not typing.UnionMeta:
return
return ann.__union_params__
elif hasattr(typing, "_Union"):
if type(ann) is not typing._Union:
return
return ann.__args__
elif hasattr(typing, "_GenericAlias"):
if type(ann) is not typing._GenericAlias:
if type(ann) is not typing.Union:
return
else:
if ann.__origin__ is not typing.Union:
return
return ann.__args__
elif hasattr(typing, "Union"):
if typing.get_origin(ann) is not typing.Union:
return
return typing.get_args(ann)
else:
raise NotImplementedError("unsupported Python version")
| 0.596081 | 0.486697 |
"""Test materialized views"""
import pytest
from pyrseas.testutils import DatabaseToMapTestCase
from pyrseas.testutils import InputMapToSqlTestCase, fix_indent
CREATE_TABLE = "CREATE TABLE t1 (c1 INTEGER, c2 TEXT, c3 INTEGER)"
VIEW_STMT = "SELECT c1, c3 * 2 AS mc3 FROM t1"
CREATE_STMT = "CREATE MATERIALIZED VIEW sd.mv1 AS " + VIEW_STMT
COMMENT_STMT = "COMMENT ON MATERIALIZED VIEW sd.mv1 IS 'Test matview mv1'"
VIEW_DEFN = " SELECT t1.c1,\n t1.c3 * 2 AS mc3\n FROM sd.t1;"
class MatViewToMapTestCase(DatabaseToMapTestCase):
"""Test mapping of created materialized views"""
def test_map_view_simple(self):
"Map a created materialized view"
stmts = [CREATE_TABLE, CREATE_STMT]
dbmap = self.to_map(stmts)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'with_data': True,
'depends_on': ['table t1']}
assert dbmap['schema sd']['materialized view mv1'] == expmap
def test_map_view_comment(self):
"Map a materialized view with a comment"
dbmap = self.to_map([CREATE_TABLE, CREATE_STMT, COMMENT_STMT])
assert dbmap['schema sd']['materialized view mv1'][
'description'] == 'Test matview mv1'
def test_map_view_index(self):
"Map a materialized view with an index"
stmts = [CREATE_TABLE, CREATE_STMT,
"CREATE INDEX idx1 ON mv1 (mc3)"]
dbmap = self.to_map(stmts)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'with_data': True,
'indexes': {'idx1': {'keys': ['mc3']}},
'depends_on': ['table t1']}
assert dbmap['schema sd']['materialized view mv1'] == expmap
class MatViewToSqlTestCase(InputMapToSqlTestCase):
"""Test SQL generation from input materialized views"""
def test_create_view(self):
"Create a materialized view"
inmap = self.std_map()
inmap['schema sd'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}},
{'c3': {'type': 'integer'}}]}})
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': "SELECT c1, c3 * 2 AS mc3 FROM sd.t1",
'depends_on': ['table t1']}})
sql = self.to_sql(inmap)
assert fix_indent(sql[0]) == "CREATE TABLE sd.t1 (c1 integer, " \
"c2 text, c3 integer)"
assert fix_indent(sql[1]) == "CREATE MATERIALIZED VIEW sd.mv1 AS " \
"SELECT c1, c3 * 2 AS mc3 FROM sd.t1"
def test_bad_view_map(self):
"Error creating a materialized view with a bad map"
inmap = self.std_map()
inmap['schema sd'].update({'mv1': {'definition': VIEW_DEFN}})
with pytest.raises(KeyError):
self.to_sql(inmap)
def test_drop_view(self):
"Drop an existing materialized view with table dependencies"
stmts = ["CREATE TABLE t1 (c1 INTEGER, c2 TEXT)",
"CREATE TABLE t2 (c1 INTEGER, c3 TEXT)",
"CREATE MATERIALIZED VIEW mv1 AS SELECT t1.c1, c2, c3 "
"FROM t1 JOIN t2 ON (t1.c1 = t2.c1)"]
sql = self.to_sql(self.std_map(), stmts)
assert sql[0] == "DROP MATERIALIZED VIEW sd.mv1"
# can't control which table will be dropped first
drt1 = 1
drt2 = 2
if 't1' in sql[2]:
drt1 = 2
drt2 = 1
assert sql[drt1] == "DROP TABLE sd.t1"
assert sql[drt2] == "DROP TABLE sd.t2"
def test_view_with_comment(self):
"Create a materialized view with a comment"
inmap = self.std_map()
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_STMT, 'description': "Test matview mv1"}})
sql = self.to_sql(inmap)
assert fix_indent(sql[0]) == CREATE_STMT
assert sql[1] == COMMENT_STMT
def test_view_index(self):
"Create an index on a materialized view"
stmts = [CREATE_TABLE, CREATE_STMT]
inmap = self.std_map()
inmap['schema sd'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}},
{'c3': {'type': 'integer'}}]}})
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'indexes': {'idx1': {'keys': ['mc3']}}}})
sql = self.to_sql(inmap, stmts)
assert sql == ["CREATE INDEX idx1 ON sd.mv1 (mc3)"]
|
tests/dbobject/test_matview.py
|
"""Test materialized views"""
import pytest
from pyrseas.testutils import DatabaseToMapTestCase
from pyrseas.testutils import InputMapToSqlTestCase, fix_indent
CREATE_TABLE = "CREATE TABLE t1 (c1 INTEGER, c2 TEXT, c3 INTEGER)"
VIEW_STMT = "SELECT c1, c3 * 2 AS mc3 FROM t1"
CREATE_STMT = "CREATE MATERIALIZED VIEW sd.mv1 AS " + VIEW_STMT
COMMENT_STMT = "COMMENT ON MATERIALIZED VIEW sd.mv1 IS 'Test matview mv1'"
VIEW_DEFN = " SELECT t1.c1,\n t1.c3 * 2 AS mc3\n FROM sd.t1;"
class MatViewToMapTestCase(DatabaseToMapTestCase):
"""Test mapping of created materialized views"""
def test_map_view_simple(self):
"Map a created materialized view"
stmts = [CREATE_TABLE, CREATE_STMT]
dbmap = self.to_map(stmts)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'with_data': True,
'depends_on': ['table t1']}
assert dbmap['schema sd']['materialized view mv1'] == expmap
def test_map_view_comment(self):
"Map a materialized view with a comment"
dbmap = self.to_map([CREATE_TABLE, CREATE_STMT, COMMENT_STMT])
assert dbmap['schema sd']['materialized view mv1'][
'description'] == 'Test matview mv1'
def test_map_view_index(self):
"Map a materialized view with an index"
stmts = [CREATE_TABLE, CREATE_STMT,
"CREATE INDEX idx1 ON mv1 (mc3)"]
dbmap = self.to_map(stmts)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'with_data': True,
'indexes': {'idx1': {'keys': ['mc3']}},
'depends_on': ['table t1']}
assert dbmap['schema sd']['materialized view mv1'] == expmap
class MatViewToSqlTestCase(InputMapToSqlTestCase):
"""Test SQL generation from input materialized views"""
def test_create_view(self):
"Create a materialized view"
inmap = self.std_map()
inmap['schema sd'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}},
{'c3': {'type': 'integer'}}]}})
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': "SELECT c1, c3 * 2 AS mc3 FROM sd.t1",
'depends_on': ['table t1']}})
sql = self.to_sql(inmap)
assert fix_indent(sql[0]) == "CREATE TABLE sd.t1 (c1 integer, " \
"c2 text, c3 integer)"
assert fix_indent(sql[1]) == "CREATE MATERIALIZED VIEW sd.mv1 AS " \
"SELECT c1, c3 * 2 AS mc3 FROM sd.t1"
def test_bad_view_map(self):
"Error creating a materialized view with a bad map"
inmap = self.std_map()
inmap['schema sd'].update({'mv1': {'definition': VIEW_DEFN}})
with pytest.raises(KeyError):
self.to_sql(inmap)
def test_drop_view(self):
"Drop an existing materialized view with table dependencies"
stmts = ["CREATE TABLE t1 (c1 INTEGER, c2 TEXT)",
"CREATE TABLE t2 (c1 INTEGER, c3 TEXT)",
"CREATE MATERIALIZED VIEW mv1 AS SELECT t1.c1, c2, c3 "
"FROM t1 JOIN t2 ON (t1.c1 = t2.c1)"]
sql = self.to_sql(self.std_map(), stmts)
assert sql[0] == "DROP MATERIALIZED VIEW sd.mv1"
# can't control which table will be dropped first
drt1 = 1
drt2 = 2
if 't1' in sql[2]:
drt1 = 2
drt2 = 1
assert sql[drt1] == "DROP TABLE sd.t1"
assert sql[drt2] == "DROP TABLE sd.t2"
def test_view_with_comment(self):
"Create a materialized view with a comment"
inmap = self.std_map()
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_STMT, 'description': "Test matview mv1"}})
sql = self.to_sql(inmap)
assert fix_indent(sql[0]) == CREATE_STMT
assert sql[1] == COMMENT_STMT
def test_view_index(self):
"Create an index on a materialized view"
stmts = [CREATE_TABLE, CREATE_STMT]
inmap = self.std_map()
inmap['schema sd'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}},
{'c3': {'type': 'integer'}}]}})
inmap['schema sd'].update({'materialized view mv1': {
'columns': [{'c1': {'type': 'integer'}},
{'mc3': {'type': 'integer'}}],
'definition': VIEW_DEFN, 'indexes': {'idx1': {'keys': ['mc3']}}}})
sql = self.to_sql(inmap, stmts)
assert sql == ["CREATE INDEX idx1 ON sd.mv1 (mc3)"]
| 0.587943 | 0.428622 |
from django import forms
from django.contrib.localflavor.ar.forms import ARPostalCodeField
from django.contrib.localflavor.ar.forms import ARProvinceSelect
from django.forms.util import ErrorList
from ventas.models import DatosDeEnvio, GastosDeEnvio, ARP
class DumbSelect(forms.Select):
EMPTY_CHOICES = [('Otra', 'Other'),]
def __init__(self, attrs=None, choices=None):
if choices:
choices += DumbSelect.EMPTY_CHOICES
else:
choices = DumbSelect.EMPTY_CHOICES
super(DumbSelect, self).__init__(attrs=attrs, choices=choices)
class GastosDeEnvioSelect(forms.Select):
def __init__(self, gastos_de_envio, attrs=None, choices=None):
"""
        gastos_de_envio is a queryset of models.GastosDeEnvio rows.
        Assumes that provinces were saved using the province select widget.
"""
choices_of_prov = [(p.provincia, ARP.get(p.provincia))
for p in gastos_de_envio]
if choices:
            choices = list(choices) + choices_of_prov
else:
choices = choices_of_prov
super(GastosDeEnvioSelect, self).__init__(attrs=attrs, choices=choices)
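# Illustrative sketch (not part of the original module): the widget is expected to be
# built from a queryset of shipping costs, mirroring its use in DatosDeEnvioForm.Meta below:
#
#     GastosDeEnvioSelect(GastosDeEnvio.objects.filter(localidad=""))
#
# which yields one choice per province that has a configured shipping cost.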
def add_css_classes(f, **kwargs):
"""
From: http://djangosnippets.org/snippets/2097/
"""
field = f.formfield(**kwargs)
if field and field.required:
attrs = field.widget.attrs
attrs['class'] = attrs.get('class', '') + 'required'
return field
class DatosDeEnvioForm(forms.ModelForm):
formfield_callback = add_css_classes
direccion = forms.CharField(label=u'Dirección', required=True,
widget=forms.TextInput(attrs={'class':
'required'
}))
localidad = forms.CharField(widget=DumbSelect(), required=False)
codigo_de_area = forms.CharField(label=u'Código de Área',
widget=forms.TextInput(attrs={'class':
'required'
' telefono'}
))
telefono = forms.CharField(label=u'Teléfono',
widget=forms.TextInput(attrs={'class':
'required'
' telefono'
}))
codigo_postal = ARPostalCodeField(label=u'Código Postal',
widget=forms.TextInput(attrs={'class':
'required'
}))
def _add_msg_to_error_fields(self, fieldlist, msg):
for fieldname in fieldlist:
errorlist = self._errors.get(fieldname)
if errorlist:
errorlist.append(msg)
else:
self._errors[fieldname] = ErrorList([msg])
def clean(self, *args, **kwargs):
super(DatosDeEnvioForm, self).clean()
cleaned_data = self.cleaned_data
codigo_de_area = cleaned_data.get('codigo_de_area')
telefono = cleaned_data.get('telefono')
if not (codigo_de_area and telefono):
msg = u"Este campo sólo acepta números"
self._add_msg_to_error_fields(('telefono',), msg)
raise forms.ValidationError(msg)
if not (codigo_de_area.isdigit() and telefono.isdigit()):
msg = u"Este campo sólo acepta números"
self._add_msg_to_error_fields(('telefono',), msg)
raise forms.ValidationError(msg)
return cleaned_data
class Meta:
model = DatosDeEnvio
widgets = {
'provincia': GastosDeEnvioSelect(
GastosDeEnvio.objects.filter(localidad="")
),
}
class GastosDeEnvioForm(forms.ModelForm):
class Meta:
model = GastosDeEnvio
widgets = {
'provincia': ARProvinceSelect(),
}
|
ventas/forms.py
|
from django import forms
from django.contrib.localflavor.ar.forms import ARPostalCodeField
from django.contrib.localflavor.ar.forms import ARProvinceSelect
from django.forms.util import ErrorList
from ventas.models import DatosDeEnvio, GastosDeEnvio, ARP
class DumbSelect(forms.Select):
EMPTY_CHOICES = [('Otra', 'Other'),]
def __init__(self, attrs=None, choices=None):
if choices:
choices += DumbSelect.EMPTY_CHOICES
else:
choices = DumbSelect.EMPTY_CHOICES
super(DumbSelect, self).__init__(attrs=attrs, choices=choices)
class GastosDeEnvioSelect(forms.Select):
def __init__(self, gastos_de_envio, attrs=None, choices=None):
"""
        gastos_de_envio is a queryset of models.GastosDeEnvio rows.
        Assumes that provinces were saved using the province select widget.
"""
choices_of_prov = [(p.provincia, ARP.get(p.provincia))
for p in gastos_de_envio]
if choices:
            choices = list(choices) + choices_of_prov
else:
choices = choices_of_prov
super(GastosDeEnvioSelect, self).__init__(attrs=attrs, choices=choices)
def add_css_classes(f, **kwargs):
"""
From: http://djangosnippets.org/snippets/2097/
"""
field = f.formfield(**kwargs)
if field and field.required:
attrs = field.widget.attrs
attrs['class'] = attrs.get('class', '') + 'required'
return field
class DatosDeEnvioForm(forms.ModelForm):
formfield_callback = add_css_classes
direccion = forms.CharField(label=u'Dirección', required=True,
widget=forms.TextInput(attrs={'class':
'required'
}))
localidad = forms.CharField(widget=DumbSelect(), required=False)
codigo_de_area = forms.CharField(label=u'Código de Área',
widget=forms.TextInput(attrs={'class':
'required'
' telefono'}
))
telefono = forms.CharField(label=u'Teléfono',
widget=forms.TextInput(attrs={'class':
'required'
' telefono'
}))
codigo_postal = ARPostalCodeField(label=u'Código Postal',
widget=forms.TextInput(attrs={'class':
'required'
}))
def _add_msg_to_error_fields(self, fieldlist, msg):
for fieldname in fieldlist:
errorlist = self._errors.get(fieldname)
if errorlist:
errorlist.append(msg)
else:
self._errors[fieldname] = ErrorList([msg])
def clean(self, *args, **kwargs):
super(DatosDeEnvioForm, self).clean()
cleaned_data = self.cleaned_data
codigo_de_area = cleaned_data.get('codigo_de_area')
telefono = cleaned_data.get('telefono')
if not (codigo_de_area and telefono):
msg = u"Este campo sólo acepta números"
self._add_msg_to_error_fields(('telefono',), msg)
raise forms.ValidationError(msg)
if not (codigo_de_area.isdigit() and telefono.isdigit()):
msg = u"Este campo sólo acepta números"
self._add_msg_to_error_fields(('telefono',), msg)
raise forms.ValidationError(msg)
return cleaned_data
class Meta:
model = DatosDeEnvio
widgets = {
'provincia': GastosDeEnvioSelect(
GastosDeEnvio.objects.filter(localidad="")
),
}
class GastosDeEnvioForm(forms.ModelForm):
class Meta:
model = GastosDeEnvio
widgets = {
'provincia': ARProvinceSelect(),
}
| 0.561936 | 0.128279 |
from hops import constants
from hops.featurestore_impl.dao.datasets.training_dataset import TrainingDataset
from hops.featurestore_impl.dao.featuregroups.featuregroup import Featuregroup
from hops.featurestore_impl.dao.featurestore.featurestore import Featurestore
from hops.featurestore_impl.dao.settings.featurestore_settings import FeaturestoreSettings
from hops.featurestore_impl.dao.storageconnectors.hopsfs_connector import HopsfsStorageConnector
from hops.featurestore_impl.dao.storageconnectors.jdbc_connector import JDBCStorageConnector
from hops.featurestore_impl.dao.storageconnectors.s3_connector import S3StorageConnector
from hops.featurestore_impl.util import fs_utils
class FeaturestoreMetadata(object):
"""
Represents feature store metadata. This metadata is used by the feature store client to determine how to
fetch and push features from/to the feature store
"""
def __init__(self, metadata_json):
"""
Initialize the featurestore metadata from JSON payload
Args:
:metadata_json: JSON metadata about the featurestore returned from Hopsworks REST API
"""
featuregroups, training_datasets, features_to_featuregroups, featurestore, settings, storage_connectors, \
online_featurestore_connector = self._parse_featurestore_metadata(metadata_json)
self.featuregroups = featuregroups
self.training_datasets = training_datasets
self.features_to_featuregroups = features_to_featuregroups
self.featurestore = featurestore
self.settings = settings
self.storage_connectors = storage_connectors
constants.FEATURE_STORE.TRAINING_DATASET_SUPPORTED_FORMATS = settings.training_dataset_formats
self.online_featurestore_connector = online_featurestore_connector
def _parse_featurestore_metadata(self, metadata_json):
"""
Parses the featurestore metadata from the REST API and puts it into an optimized data structure
with O(1) lookup time for features, featuregroups, and training datasets
Args:
            :metadata_json: the JSON metadata of the featurestore returned by hopsworks
Returns:
the parsed metadata
"""
featuregroups = {}
training_datasets = {}
features_to_featuregroups = {}
storage_connectors = {}
for fg in metadata_json[constants.REST_CONFIG.JSON_FEATUREGROUPS]:
fg_obj = Featuregroup(fg)
featuregroups[fs_utils._get_table_name(fg[constants.REST_CONFIG.JSON_FEATUREGROUP_NAME],
fg[constants.REST_CONFIG.JSON_FEATUREGROUP_VERSION])] = fg_obj
for f in fg[constants.REST_CONFIG.JSON_FEATUREGROUP_FEATURES]:
if f[constants.REST_CONFIG.JSON_FEATURE_NAME] in features_to_featuregroups:
features_to_featuregroups[f[constants.REST_CONFIG.JSON_FEATURE_NAME]].append(fg_obj)
else:
features_to_featuregroups[f[constants.REST_CONFIG.JSON_FEATURE_NAME]] = [fg_obj]
for td in metadata_json[constants.REST_CONFIG.JSON_TRAINING_DATASETS]:
training_datasets[fs_utils._get_table_name(td[constants.REST_CONFIG.JSON_TRAINING_DATASET_NAME],
td[constants.REST_CONFIG.JSON_TRAINING_DATASET_VERSION])] = \
TrainingDataset(td)
settings = FeaturestoreSettings(metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_SETTINGS])
for sc in metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_STORAGE_CONNECTORS]:
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.jdbc_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = \
JDBCStorageConnector(sc)
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.s3_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = S3StorageConnector(sc)
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.hopsfs_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = \
HopsfsStorageConnector(sc)
featurestore = Featurestore(metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE])
if constants.REST_CONFIG.JSON_FEATURESTORE_ONLINE_CONNECTOR in metadata_json:
online_featurestore_connector = JDBCStorageConnector(
metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_ONLINE_CONNECTOR])
else:
online_featurestore_connector = None
return featuregroups, training_datasets, features_to_featuregroups, \
featurestore, settings, storage_connectors, online_featurestore_connector
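# Illustrative sketch (not part of the original file): after parsing, lookups are
# O(1) dict accesses keyed by fs_utils._get_table_name(name, version), e.g.
#
#     metadata = FeaturestoreMetadata(response_json)   # response_json: REST payload
#     fg = metadata.featuregroups[fs_utils._get_table_name("games_features", 1)]
#
# "games_features" and the version number are made-up example values.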
|
hops/featurestore_impl/dao/common/featurestore_metadata.py
|
from hops import constants
from hops.featurestore_impl.dao.datasets.training_dataset import TrainingDataset
from hops.featurestore_impl.dao.featuregroups.featuregroup import Featuregroup
from hops.featurestore_impl.dao.featurestore.featurestore import Featurestore
from hops.featurestore_impl.dao.settings.featurestore_settings import FeaturestoreSettings
from hops.featurestore_impl.dao.storageconnectors.hopsfs_connector import HopsfsStorageConnector
from hops.featurestore_impl.dao.storageconnectors.jdbc_connector import JDBCStorageConnector
from hops.featurestore_impl.dao.storageconnectors.s3_connector import S3StorageConnector
from hops.featurestore_impl.util import fs_utils
class FeaturestoreMetadata(object):
"""
Represents feature store metadata. This metadata is used by the feature store client to determine how to
fetch and push features from/to the feature store
"""
def __init__(self, metadata_json):
"""
Initialize the featurestore metadata from JSON payload
Args:
:metadata_json: JSON metadata about the featurestore returned from Hopsworks REST API
"""
featuregroups, training_datasets, features_to_featuregroups, featurestore, settings, storage_connectors, \
online_featurestore_connector = self._parse_featurestore_metadata(metadata_json)
self.featuregroups = featuregroups
self.training_datasets = training_datasets
self.features_to_featuregroups = features_to_featuregroups
self.featurestore = featurestore
self.settings = settings
self.storage_connectors = storage_connectors
constants.FEATURE_STORE.TRAINING_DATASET_SUPPORTED_FORMATS = settings.training_dataset_formats
self.online_featurestore_connector = online_featurestore_connector
def _parse_featurestore_metadata(self, metadata_json):
"""
Parses the featurestore metadata from the REST API and puts it into an optimized data structure
with O(1) lookup time for features, featuregroups, and training datasets
Args:
            :metadata_json: the JSON metadata of the featurestore returned by hopsworks
Returns:
the parsed metadata
"""
featuregroups = {}
training_datasets = {}
features_to_featuregroups = {}
storage_connectors = {}
for fg in metadata_json[constants.REST_CONFIG.JSON_FEATUREGROUPS]:
fg_obj = Featuregroup(fg)
featuregroups[fs_utils._get_table_name(fg[constants.REST_CONFIG.JSON_FEATUREGROUP_NAME],
fg[constants.REST_CONFIG.JSON_FEATUREGROUP_VERSION])] = fg_obj
for f in fg[constants.REST_CONFIG.JSON_FEATUREGROUP_FEATURES]:
if f[constants.REST_CONFIG.JSON_FEATURE_NAME] in features_to_featuregroups:
features_to_featuregroups[f[constants.REST_CONFIG.JSON_FEATURE_NAME]].append(fg_obj)
else:
features_to_featuregroups[f[constants.REST_CONFIG.JSON_FEATURE_NAME]] = [fg_obj]
for td in metadata_json[constants.REST_CONFIG.JSON_TRAINING_DATASETS]:
training_datasets[fs_utils._get_table_name(td[constants.REST_CONFIG.JSON_TRAINING_DATASET_NAME],
td[constants.REST_CONFIG.JSON_TRAINING_DATASET_VERSION])] = \
TrainingDataset(td)
settings = FeaturestoreSettings(metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_SETTINGS])
for sc in metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_STORAGE_CONNECTORS]:
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.jdbc_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = \
JDBCStorageConnector(sc)
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.s3_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = S3StorageConnector(sc)
if sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_TYPE] == \
settings.hopsfs_connector_type:
storage_connectors[sc[constants.REST_CONFIG.JSON_FEATURESTORE_CONNECTOR_NAME]] = \
HopsfsStorageConnector(sc)
featurestore = Featurestore(metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE])
if constants.REST_CONFIG.JSON_FEATURESTORE_ONLINE_CONNECTOR in metadata_json:
online_featurestore_connector = JDBCStorageConnector(
metadata_json[constants.REST_CONFIG.JSON_FEATURESTORE_ONLINE_CONNECTOR])
else:
online_featurestore_connector = None
return featuregroups, training_datasets, features_to_featuregroups, \
featurestore, settings, storage_connectors, online_featurestore_connector
| 0.651355 | 0.315604 |
import numpy as np
from scipy.integrate import ode, odeint
import matplotlib.pyplot as plt
parsec = 3.086 * 1e16 #m
year = 3.156 * 1e7 #s
pi = 3.14159265358979323846
G = 6.67430 * 1e-11 #N * m2 / kg2
LO=93.016 * 1e9 * 9.461e15/2 #93 billion ly (observable-universe diameter) in m, halved to get the radius
a0=1
omega_R = 4.8e-5
omega_lambda = 0.683-omega_R
omega_M = 0.317
T0 = 2.725
omega_K = 1 - (omega_R + omega_M + omega_lambda)
H0_ = 69.8 #km/s/Mpc
unit_correction = 1/(parsec*1e6) * (year) * (1e3)
H0 = H0_ * unit_correction #convert H0 from km/s/Mpc to 1/yr
params = (H0, omega_R, omega_M, omega_lambda, omega_K)
time_scale = 'linear'
#time_scale = 'log'
def main():
if time_scale == 'log':
t0 = 0
y0 = 1e-18
t1 = 1e12
backend = 'dopri5'
solver = ode(friedmann_ode).set_integrator(backend)
sol = []
def solout(t, y):
sol.append([t, *y])
solver.set_solout(solout)
solver.set_initial_value(y0, t0).set_f_params(H0, omega_R, omega_M, omega_lambda, omega_K)
solver.integrate(t1)
sol = np.array(sol)
time = sol[:,0]
a = sol[:,1]
else:
        y0 = [1e-19] #scale factor at t=0, should calculate it from Planck density etc
time = np.linspace(0, 40, 10000)*1e9
sol = odeint(friedmann_odeint, y0, time, args=params)
a = sol[:,0]
redshift = a0/a - 1
T = (1+redshift) * T0
adot = np.gradient(a,time)
    H = adot/a #Hubble parameter (expansion rate)
rho = np.power(H,2)*3/(8*pi*G)/1e15 #need to double check 1e15, should be 1e9*year
index_now = np.argmin(np.abs(a-a0))
current_day = time[index_now]
rho_R = omega_R*(a0/a)**4
rho_M = omega_M*(a0/a)**3
rho_lamb = omega_lambda
rho_sum = rho_R + rho_M + rho_lamb
rho_R = rho_R/rho_sum
rho_M = rho_M/rho_sum
rho_lamb = rho_lamb/rho_sum
density_color = 'black'
if time_scale == 'log':
fig, ax1 = plt.subplots(1,figsize = (10,6))
ax1.plot(time*(year),a*LO,linewidth = 2, color = 'blue')
ax1.plot(np.logspace(3,17),np.power(np.logspace(3,17),2/3)*1e15*0.65, linestyle = '--', color = 'blue')
ax1.set_xlabel('Time after singularity [s]')
ax1.set_ylabel('Radius of Obs. universe [Gly]', color='b')
ax1.set_yscale('log')
ax1.set_xscale('log')
ax1.tick_params('y', colors='b')
ax1.set_xlim(1000,1e23)
ax1.set_ylim(np.min(a*LO)*1e6,np.max(a*LO)/1e14)
ax2 = ax1.twinx()
ax2.plot(time*(year),rho,linewidth = 2, color = density_color)
ax2.axhline(y=rho[-1],xmin = 0.8, linewidth = 2, color = density_color)
ax2.set_xlabel('Time after singularity [s]')
ax2.set_ylabel('Density [kg/m3]', color=density_color)
ax2.set_yscale('log')
ax2.set_xscale('log')
ax2.tick_params('y', colors=density_color)
ax2.set_xlim(1000,1e23)
ax2.set_ylim(ymin=1e-34,ymax = 1e10)
ax3 = ax1.twinx()
ax3.fill_between(time*(year), np.zeros(len(time)), rho_R, color = 'orange', label = 'Radiation $a \propto t^{1/2}$', alpha = 0.15)
ax3.fill_between(time*(year), rho_R, rho_M + rho_R, color = 'green', label = 'Matter $a \propto t^{2/3}$', alpha = 0.15)
ax3.fill_between(list(time*(year))+[1e25], list(rho_M + rho_R)+[0], list(rho_lamb + rho_M + rho_R)+[1], color = 'black', label = 'Dark Energy $a \propto e^{H t}$', alpha = 0.15)
ax3.axhline(y=1,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'orange', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time*(year),rho_R,linewidth = 2, color = 'orange', alpha = 0.3)
ax3.plot(time*(year),rho_M + rho_R,linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time*(year),rho_lamb + rho_M + rho_R,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axvline(x=current_day*(year), ymax = 0.03, linewidth = 6, color = 'purple')#, label = 'Current time')
ax3.axvline(x=380000*(year), ymax = 0.03, linewidth = 6, color = 'brown')#, label = 'CMB')
#ax3.axvline(x=current_day*(year), ymin = 0.97, linewidth = 6, color = 'purple')#, label = 'Current time')
#ax3.axvline(x=380000*(year), ymin = 0.97, linewidth = 6, color = 'red')#, label = 'CMB')
#ax3.axvline(x=current_day*(year), linewidth = 2, color = 'purple', alpha = 0.2)#, label = 'Current time')
#ax3.axvline(x=380000*(year), linewidth = 2, color = 'red', alpha = 0.2)#, label = 'CMB')
ax3.set_xlim(1000,1e23)
ax3.set_ylim(ymin=0,ymax=1)
ax3.set_xscale('log')
#ax3.set_ylabel('Depth d', color='r')
#ax3.set_ylim([0, max(y2)])
ax3.set_yticks([],[])
ax3.tick_params('y', colors='r')
#ax3.legend(loc = (0.045,0.65), prop = {'size':12.5})
ax3.legend(loc = (0.3,0.65), prop = {'size':12.5})
fig.tight_layout()
plt.show()
else:
a = a*46
time = time/1e9
fig, ax1 = plt.subplots(1,figsize = (8.5,6))
ax1.plot(time,a,linewidth = 2, color = 'blue')
ax1.set_xlabel('Time after singularity [Gy]')
ax1.set_ylabel('Radius of Obs. universe [Gly]', color='b')
ax1.tick_params('y', colors='b')
ax1.set_ylim(0,350/2)
ax1.set_xlim(0,40)
ax2 = ax1.twinx()
ax2.plot(time,rho,linewidth = 2, color = density_color)
ax2.set_xlabel('Time after singularity [Gy]')
ax2.set_ylabel('Density [kg/m3]', color=density_color)
ax2.set_yscale('log')
ax2.tick_params('y', colors=density_color)
ax2.set_xlim(np.min(time)-0.2,np.max(time))
ax2.set_ylim(1e-27,1e-20)
ax3 = ax1.twinx()
ax3.fill_between(time, rho_R, rho_M + rho_R, color = 'green', label = 'Matter $a \propto t^{2/3}$', alpha = 0.15)
ax3.fill_between(time, rho_M + rho_R, rho_lamb + rho_M + rho_R, color = 'black', label = 'Dark Energy $a \propto e^{H t}$', alpha = 0.15)
ax3.axhline(y=1,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'orange', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time,rho_R,linewidth = 2, color = 'orange', alpha = 0.3)
ax3.plot(time,rho_M + rho_R,linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time,rho_lamb + rho_M + rho_R,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axvline(x=current_day/1e9, linewidth = 2, color = 'purple', linestyle = '--')#, label = 'Current time')
ax3.set_ylim(ymin=0,ymax=1)
ax3.set_yticks([],[])
ax3.legend(loc = (0.54,0.2), prop = {'size':12.5})
ax3.set_xlim(0,40)
print("Current day is %.2f Gy from this model" % (current_day/1e9)) #Should be 13.813 Gy
def friedmann_ode(t, y, H0, omega_R, omega_M, omega_lambda, omega_K):
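    # First Friedmann equation: H(a)^2 = H0^2*(omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda),
    # so the ODE integrated here is simply da/dt = a * H(a).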
#(H0, omega_R, omega_M, omega_lambda, omega_K) = params
a = y[0]
dadt = a * H0 * np.sqrt( omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda)
dydt = [dadt]
return dydt
def friedmann_odeint(y, t, H0, omega_R, omega_M, omega_lambda, omega_K):
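    # Same right-hand side as friedmann_ode, but with the (y, t) argument order that scipy's odeint expects.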
#(H0, omega_R, omega_M, omega_lambda, omega_K) = params
a = y[0]
dadt = a * H0 * np.sqrt( omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda)
dydt = [dadt]
return dydt
if __name__ == '__main__':
plt.rcParams.update({'font.size': 15})
main()
|
Rewinding the Universe to the Beginning of Time/Code/simulate_universe.py
|
import numpy as np
from scipy.integrate import ode, odeint
import matplotlib.pyplot as plt
parsec = 3.086 * 1e16 #m
year = 3.156 * 1e7 #s
pi = 3.14159265358979323846
G = 6.67430 * 1e-11 #N * m2 / kg2
LO=93.016 * 1e9 * 9.461e15/2 #observable universe: ~93 billion ly diameter in m, halved to a radius
a0=1
omega_R = 4.8e-5
omega_lambda = 0.683-omega_R
omega_M = 0.317
T0 = 2.725
omega_K = 1 - (omega_R + omega_M + omega_lambda)
H0_ = 69.8 #km/s/Mpc
unit_correction = 1/(parsec*1e6) * (year) * (1e3)
H0 = H0_ * unit_correction #convert H0 from km/s/Mpc to 1/yr
params = (H0, omega_R, omega_M, omega_lambda, omega_K)
time_scale = 'linear'
#time_scale = 'log'
def main():
if time_scale == 'log':
t0 = 0
y0 = 1e-18
t1 = 1e12
backend = 'dopri5'
solver = ode(friedmann_ode).set_integrator(backend)
sol = []
def solout(t, y):
sol.append([t, *y])
solver.set_solout(solout)
solver.set_initial_value(y0, t0).set_f_params(H0, omega_R, omega_M, omega_lambda, omega_K)
solver.integrate(t1)
sol = np.array(sol)
time = sol[:,0]
a = sol[:,1]
else:
        y0 = [1e-19] #scale factor at t=0, should calculate it from Planck density etc
time = np.linspace(0, 40, 10000)*1e9
sol = odeint(friedmann_odeint, y0, time, args=params)
a = sol[:,0]
redshift = a0/a - 1
T = (1+redshift) * T0
adot = np.gradient(a,time)
H = adot/a #expansion factor
    rho = np.power(H,2)*3/(8*pi*G)/1e15 #rho = 3H^2/(8*pi*G); the /1e15 ~ year**2 converts H^2 from 1/yr^2 to 1/s^2
index_now = np.argmin(np.abs(a-a0))
current_day = time[index_now]
rho_R = omega_R*(a0/a)**4
rho_M = omega_M*(a0/a)**3
rho_lamb = omega_lambda
rho_sum = rho_R + rho_M + rho_lamb
rho_R = rho_R/rho_sum
rho_M = rho_M/rho_sum
rho_lamb = rho_lamb/rho_sum
density_color = 'black'
if time_scale == 'log':
fig, ax1 = plt.subplots(1,figsize = (10,6))
ax1.plot(time*(year),a*LO,linewidth = 2, color = 'blue')
ax1.plot(np.logspace(3,17),np.power(np.logspace(3,17),2/3)*1e15*0.65, linestyle = '--', color = 'blue')
ax1.set_xlabel('Time after singularity [s]')
ax1.set_ylabel('Radius of Obs. universe [Gly]', color='b')
ax1.set_yscale('log')
ax1.set_xscale('log')
ax1.tick_params('y', colors='b')
ax1.set_xlim(1000,1e23)
ax1.set_ylim(np.min(a*LO)*1e6,np.max(a*LO)/1e14)
ax2 = ax1.twinx()
ax2.plot(time*(year),rho,linewidth = 2, color = density_color)
ax2.axhline(y=rho[-1],xmin = 0.8, linewidth = 2, color = density_color)
ax2.set_xlabel('Time after singularity [s]')
ax2.set_ylabel('Density [kg/m3]', color=density_color)
ax2.set_yscale('log')
ax2.set_xscale('log')
ax2.tick_params('y', colors=density_color)
ax2.set_xlim(1000,1e23)
ax2.set_ylim(ymin=1e-34,ymax = 1e10)
ax3 = ax1.twinx()
        ax3.fill_between(time*(year), np.zeros(len(time)), rho_R, color = 'orange', label = r'Radiation $a \propto t^{1/2}$', alpha = 0.15)
        ax3.fill_between(time*(year), rho_R, rho_M + rho_R, color = 'green', label = r'Matter $a \propto t^{2/3}$', alpha = 0.15)
        ax3.fill_between(list(time*(year))+[1e25], list(rho_M + rho_R)+[0], list(rho_lamb + rho_M + rho_R)+[1], color = 'black', label = r'Dark Energy $a \propto e^{H t}$', alpha = 0.15)
ax3.axhline(y=1,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'orange', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time*(year),rho_R,linewidth = 2, color = 'orange', alpha = 0.3)
ax3.plot(time*(year),rho_M + rho_R,linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time*(year),rho_lamb + rho_M + rho_R,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axvline(x=current_day*(year), ymax = 0.03, linewidth = 6, color = 'purple')#, label = 'Current time')
ax3.axvline(x=380000*(year), ymax = 0.03, linewidth = 6, color = 'brown')#, label = 'CMB')
#ax3.axvline(x=current_day*(year), ymin = 0.97, linewidth = 6, color = 'purple')#, label = 'Current time')
#ax3.axvline(x=380000*(year), ymin = 0.97, linewidth = 6, color = 'red')#, label = 'CMB')
#ax3.axvline(x=current_day*(year), linewidth = 2, color = 'purple', alpha = 0.2)#, label = 'Current time')
#ax3.axvline(x=380000*(year), linewidth = 2, color = 'red', alpha = 0.2)#, label = 'CMB')
ax3.set_xlim(1000,1e23)
ax3.set_ylim(ymin=0,ymax=1)
ax3.set_xscale('log')
#ax3.set_ylabel('Depth d', color='r')
#ax3.set_ylim([0, max(y2)])
ax3.set_yticks([],[])
ax3.tick_params('y', colors='r')
#ax3.legend(loc = (0.045,0.65), prop = {'size':12.5})
ax3.legend(loc = (0.3,0.65), prop = {'size':12.5})
fig.tight_layout()
plt.show()
else:
a = a*46
time = time/1e9
fig, ax1 = plt.subplots(1,figsize = (8.5,6))
ax1.plot(time,a,linewidth = 2, color = 'blue')
ax1.set_xlabel('Time after singularity [Gy]')
ax1.set_ylabel('Radius of Obs. universe [Gly]', color='b')
ax1.tick_params('y', colors='b')
ax1.set_ylim(0,350/2)
ax1.set_xlim(0,40)
ax2 = ax1.twinx()
ax2.plot(time,rho,linewidth = 2, color = density_color)
ax2.set_xlabel('Time after singularity [Gy]')
ax2.set_ylabel('Density [kg/m3]', color=density_color)
ax2.set_yscale('log')
ax2.tick_params('y', colors=density_color)
ax2.set_xlim(np.min(time)-0.2,np.max(time))
ax2.set_ylim(1e-27,1e-20)
ax3 = ax1.twinx()
        ax3.fill_between(time, rho_R, rho_M + rho_R, color = 'green', label = r'Matter $a \propto t^{2/3}$', alpha = 0.15)
        ax3.fill_between(time, rho_M + rho_R, rho_lamb + rho_M + rho_R, color = 'black', label = r'Dark Energy $a \propto e^{H t}$', alpha = 0.15)
ax3.axhline(y=1,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'orange', alpha = 0.3)
ax3.axhline(y=0,xmin = 0.8, linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time,rho_R,linewidth = 2, color = 'orange', alpha = 0.3)
ax3.plot(time,rho_M + rho_R,linewidth = 2, color = 'green', alpha = 0.3)
ax3.plot(time,rho_lamb + rho_M + rho_R,linewidth = 2, color = 'black', alpha = 0.3)
ax3.axvline(x=current_day/1e9, linewidth = 2, color = 'purple', linestyle = '--')#, label = 'Current time')
ax3.set_ylim(ymin=0,ymax=1)
ax3.set_yticks([],[])
ax3.legend(loc = (0.54,0.2), prop = {'size':12.5})
ax3.set_xlim(0,40)
print("Current day is %.2f Gy from this model" % (current_day/1e9)) #Should be 13.813 Gy
def friedmann_ode(t, y, H0, omega_R, omega_M, omega_lambda, omega_K):
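    # First Friedmann equation: H(a)^2 = H0^2*(omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda),
    # so the ODE integrated here is simply da/dt = a * H(a).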
#(H0, omega_R, omega_M, omega_lambda, omega_K) = params
a = y[0]
dadt = a * H0 * np.sqrt( omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda)
dydt = [dadt]
return dydt
def friedmann_odeint(y, t, H0, omega_R, omega_M, omega_lambda, omega_K):
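    # Same right-hand side as friedmann_ode, but with the (y, t) argument order that scipy's odeint expects.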
#(H0, omega_R, omega_M, omega_lambda, omega_K) = params
a = y[0]
dadt = a * H0 * np.sqrt( omega_R*(a0/a)**4 + omega_M*(a0/a)**3 + omega_K*(a0/a)**2 + omega_lambda)
dydt = [dadt]
return dydt
if __name__ == '__main__':
plt.rcParams.update({'font.size': 15})
main()
| 0.357007 | 0.497864 |
import unittest
from sliding_puzzle.algorithm.search import Search
from sliding_puzzle.representation.puzzle import Puzzle
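# Expected results below follow the classic inversion-count solvability criterion for the
# sliding puzzle (with the usual blank-row parity correction on even-width boards).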
class SolvableAtFirstTestCase(unittest.TestCase):
def test_is_unsolvable(self):
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4, 5],
[6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 35, 34],
]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 15, 14]]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 24, 23],
]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle([[0, 1, 2], [3, 4, 5], [6, 8, 7]])
self.assertFalse(Search.is_solvable(puzzle))
def test_is_solvable(self):
puzzle: Puzzle = Puzzle([[0, 1, 2], [3, 4, 5], [6, 7, 8]])
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
]
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4, 5],
[6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 34, 35],
]
)
self.assertTrue(Search.is_solvable(puzzle))
def test_is_solvable_8(self):
solvable = [
[[0, 1, 2], [4, 5, 3], [7, 8, 6]],
[[1, 2, 3], [0, 4, 6], [7, 5, 8]],
[[1, 0, 3], [7, 2, 5], [8, 4, 6]],
]
not_solvable = [
[[1, 2, 3], [6, 8, 4], [5, 7, 0]],
[[1, 2, 3], [4, 5, 6], [8, 7, 0]],
[[1, 5, 0], [3, 2, 8], [4, 6, 7]],
]
        for p in solvable:
            self.assertTrue(Search.is_solvable(Puzzle(p)))
        for p in not_solvable:
            self.assertFalse(Search.is_solvable(Puzzle(p)))
def test_is_solvable_15(self):
solvable = [
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 0, 12], [13, 14, 15, 11]],
[[4, 1, 2, 3], [5, 6, 7, 11], [8, 9, 10, 15], [12, 13, 14, 0]],
]
not_solvable = [
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 0, 11], [13, 14, 15, 12]],
]
        for p in solvable:
            self.assertTrue(Search.is_solvable(Puzzle(p)))
        for p in not_solvable:
            self.assertFalse(Search.is_solvable(Puzzle(p)))
class SolvableAtLastTestCase(unittest.TestCase):
def test_is_unsolvable(self):
puzzle: Puzzle = Puzzle([[2, 1, 3], [4, 5, 6], [7, 8, 0]], blank_at_first=False)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[2, 1, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18],
[19, 20, 21, 22, 23, 24],
[25, 26, 27, 28, 29, 30],
[31, 32, 33, 34, 35, 0],
],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[2, 1, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[2, 1, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
[21, 22, 23, 24, 0],
],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
def test_is_solvable(self):
puzzle: Puzzle = Puzzle([[1, 2, 3], [4, 5, 6], [7, 8, 0]], blank_at_first=False)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
[21, 22, 23, 24, 0],
],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[1, 2, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18],
[19, 20, 21, 22, 23, 24],
[25, 26, 27, 28, 29, 30],
[31, 32, 33, 34, 35, 0],
],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
|
tests/test_solvable.py
|
import unittest
from sliding_puzzle.algorithm.search import Search
from sliding_puzzle.representation.puzzle import Puzzle
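# Expected results below follow the classic inversion-count solvability criterion for the
# sliding puzzle (with the usual blank-row parity correction on even-width boards).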
class SolvableAtFirstTestCase(unittest.TestCase):
def test_is_unsolvable(self):
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4, 5],
[6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 35, 34],
]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 15, 14]]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 24, 23],
]
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle([[0, 1, 2], [3, 4, 5], [6, 8, 7]])
self.assertFalse(Search.is_solvable(puzzle))
def test_is_solvable(self):
puzzle: Puzzle = Puzzle([[0, 1, 2], [3, 4, 5], [6, 7, 8]])
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
]
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[0, 1, 2, 3, 4, 5],
[6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 34, 35],
]
)
self.assertTrue(Search.is_solvable(puzzle))
def test_is_solvable_8(self):
solvable = [
[[0, 1, 2], [4, 5, 3], [7, 8, 6]],
[[1, 2, 3], [0, 4, 6], [7, 5, 8]],
[[1, 0, 3], [7, 2, 5], [8, 4, 6]],
]
not_solvable = [
[[1, 2, 3], [6, 8, 4], [5, 7, 0]],
[[1, 2, 3], [4, 5, 6], [8, 7, 0]],
[[1, 5, 0], [3, 2, 8], [4, 6, 7]],
]
        for p in solvable:
            self.assertTrue(Search.is_solvable(Puzzle(p)))
        for p in not_solvable:
            self.assertFalse(Search.is_solvable(Puzzle(p)))
def test_is_solvable_15(self):
solvable = [
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 0, 12], [13, 14, 15, 11]],
[[4, 1, 2, 3], [5, 6, 7, 11], [8, 9, 10, 15], [12, 13, 14, 0]],
]
not_solvable = [
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 0, 11], [13, 14, 15, 12]],
]
        for p in solvable:
            self.assertTrue(Search.is_solvable(Puzzle(p)))
        for p in not_solvable:
            self.assertFalse(Search.is_solvable(Puzzle(p)))
class SolvableAtLastTestCase(unittest.TestCase):
def test_is_unsolvable(self):
puzzle: Puzzle = Puzzle([[2, 1, 3], [4, 5, 6], [7, 8, 0]], blank_at_first=False)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[2, 1, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18],
[19, 20, 21, 22, 23, 24],
[25, 26, 27, 28, 29, 30],
[31, 32, 33, 34, 35, 0],
],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[2, 1, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[2, 1, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
[21, 22, 23, 24, 0],
],
blank_at_first=False,
)
self.assertFalse(Search.is_solvable(puzzle))
def test_is_solvable(self):
puzzle: Puzzle = Puzzle([[1, 2, 3], [4, 5, 6], [7, 8, 0]], blank_at_first=False)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 0]],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
[21, 22, 23, 24, 0],
],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
puzzle: Puzzle = Puzzle(
[
[1, 2, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18],
[19, 20, 21, 22, 23, 24],
[25, 26, 27, 28, 29, 30],
[31, 32, 33, 34, 35, 0],
],
blank_at_first=False,
)
self.assertTrue(Search.is_solvable(puzzle))
| 0.551815 | 0.689422 |
import random
from itertools import izip_longest, groupby, chain
from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,
JobGenerator, Status, settings)
from wa.framework.configuration.parsers import ConfigParser
from wa.framework.configuration.plugin_cache import PluginCache
from wa.framework.exception import NotFoundError
from wa.framework.job import Job
from wa.utils import log
class CombinedConfig(object):
@staticmethod
def from_pod(pod):
instance = CombinedConfig()
instance.settings = MetaConfiguration.from_pod(pod.get('settings', {}))
instance.run_config = RunConfiguration.from_pod(pod.get('run_config', {}))
return instance
def __init__(self, settings=None, run_config=None):
self.settings = settings
self.run_config = run_config
def to_pod(self):
return {'settings': self.settings.to_pod(),
'run_config': self.run_config.to_pod()}
class ConfigManager(object):
"""
Represents run-time state of WA. Mostly used as a container for loaded
configuration and discovered plugins.
This exists outside of any command or run and is associated with the running
    instance of WA itself.
"""
@property
def enabled_instruments(self):
return self.jobs_config.enabled_instruments
@property
def enabled_processors(self):
return self.jobs_config.enabled_processors
@property
def job_specs(self):
if not self._jobs_generated:
msg = 'Attempting to access job specs before '\
'jobs have been generated'
raise RuntimeError(msg)
return [j.spec for j in self._jobs]
@property
def jobs(self):
if not self._jobs_generated:
msg = 'Attempting to access jobs before '\
'they have been generated'
raise RuntimeError(msg)
return self._jobs
def __init__(self, settings=settings):
self.settings = settings
self.run_config = RunConfiguration()
self.plugin_cache = PluginCache()
self.jobs_config = JobGenerator(self.plugin_cache)
self.loaded_config_sources = []
self._config_parser = ConfigParser()
self._jobs = []
self._jobs_generated = False
self.agenda = None
def load_config_file(self, filepath):
self._config_parser.load_from_path(self, filepath)
self.loaded_config_sources.append(filepath)
def load_config(self, values, source, wrap_exceptions=True):
self._config_parser.load(self, values, source)
self.loaded_config_sources.append(source)
def get_plugin(self, name=None, kind=None, *args, **kwargs):
return self.plugin_cache.get_plugin(name, kind, *args, **kwargs)
def get_instruments(self, target):
instruments = []
for name in self.enabled_instruments:
try:
instruments.append(self.get_plugin(name, kind='instrument',
target=target))
except NotFoundError:
msg = 'Instrument "{}" not found'
raise NotFoundError(msg.format(name))
return instruments
def get_processors(self):
processors = []
for name in self.enabled_processors:
try:
proc = self.plugin_cache.get_plugin(name, kind='output_processor')
except NotFoundError:
msg = 'Output Processor "{}" not found'
raise NotFoundError(msg.format(name))
processors.append(proc)
return processors
def finalize(self):
if not self.agenda:
msg = 'Attempting to finalize config before agenda has been set'
raise RuntimeError(msg)
self.run_config.merge_device_config(self.plugin_cache)
return CombinedConfig(self.settings, self.run_config)
def generate_jobs(self, context):
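        # Expand every job spec into (spec, iteration) pairs, ordered according to
        # run_config.execution_order (see permute_map at module level).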
job_specs = self.jobs_config.generate_job_specs(context.tm)
exec_order = self.run_config.execution_order
log.indent()
for spec, i in permute_iterations(job_specs, exec_order):
job = Job(spec, i, context)
job.load(context.tm.target)
self._jobs.append(job)
context.run_state.add_job(job)
log.dedent()
self._jobs_generated = True
def permute_by_job(specs):
"""
This is that "classic" implementation that executes all iterations of a
workload spec before proceeding onto the next spec.
"""
for spec in specs:
for i in range(1, spec.iterations + 1):
yield (spec, i)
def permute_by_iteration(specs):
"""
Runs the first iteration for all benchmarks first, before proceeding to the
    next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1, B2,
C1, C2...
    If multiple sections were specified in the agenda, this will run all
sections for the first global spec first, followed by all sections for the
second spec, etc.
e.g. given sections X and Y, and global specs A and B, with 2 iterations,
this will run
X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
"""
groups = [list(g) for k, g in groupby(specs, lambda s: s.workload_id)]
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
if t is not None:
yield t
def permute_by_section(specs):
"""
Runs the first iteration for all benchmarks first, before proceeding to the
    next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1, B2,
C1, C2...
    If multiple sections were specified in the agenda, this will run all specs
    for the first section followed by all specs for the second section, etc.
e.g. given sections X and Y, and global specs A and B, with 2 iterations,
this will run
X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
"""
groups = [list(g) for k, g in groupby(specs, lambda s: s.section_id)]
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
if t is not None:
yield t
def permute_randomly(specs):
"""
This will generate a random permutation of specs/iteration tuples.
"""
result = []
for spec in specs:
for i in xrange(1, spec.iterations + 1):
result.append((spec, i))
random.shuffle(result)
for t in result:
yield t
permute_map = {
'by_iteration': permute_by_iteration,
'by_job': permute_by_job,
'by_section': permute_by_section,
'random': permute_randomly,
}
def permute_iterations(specs, exec_order):
if exec_order not in permute_map:
msg = 'Unknown execution order "{}"; must be in: {}'
raise ValueError(msg.format(exec_order, permute_map.keys()))
return permute_map[exec_order](specs)
|
wa/framework/configuration/execution.py
|
import random
from itertools import izip_longest, groupby, chain
from wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,
JobGenerator, Status, settings)
from wa.framework.configuration.parsers import ConfigParser
from wa.framework.configuration.plugin_cache import PluginCache
from wa.framework.exception import NotFoundError
from wa.framework.job import Job
from wa.utils import log
class CombinedConfig(object):
@staticmethod
def from_pod(pod):
instance = CombinedConfig()
instance.settings = MetaConfiguration.from_pod(pod.get('settings', {}))
instance.run_config = RunConfiguration.from_pod(pod.get('run_config', {}))
return instance
def __init__(self, settings=None, run_config=None):
self.settings = settings
self.run_config = run_config
def to_pod(self):
return {'settings': self.settings.to_pod(),
'run_config': self.run_config.to_pod()}
class ConfigManager(object):
"""
Represents run-time state of WA. Mostly used as a container for loaded
configuration and discovered plugins.
This exists outside of any command or run and is associated with the running
    instance of WA itself.
"""
@property
def enabled_instruments(self):
return self.jobs_config.enabled_instruments
@property
def enabled_processors(self):
return self.jobs_config.enabled_processors
@property
def job_specs(self):
if not self._jobs_generated:
msg = 'Attempting to access job specs before '\
'jobs have been generated'
raise RuntimeError(msg)
return [j.spec for j in self._jobs]
@property
def jobs(self):
if not self._jobs_generated:
msg = 'Attempting to access jobs before '\
'they have been generated'
raise RuntimeError(msg)
return self._jobs
def __init__(self, settings=settings):
self.settings = settings
self.run_config = RunConfiguration()
self.plugin_cache = PluginCache()
self.jobs_config = JobGenerator(self.plugin_cache)
self.loaded_config_sources = []
self._config_parser = ConfigParser()
self._jobs = []
self._jobs_generated = False
self.agenda = None
def load_config_file(self, filepath):
self._config_parser.load_from_path(self, filepath)
self.loaded_config_sources.append(filepath)
def load_config(self, values, source, wrap_exceptions=True):
self._config_parser.load(self, values, source)
self.loaded_config_sources.append(source)
def get_plugin(self, name=None, kind=None, *args, **kwargs):
return self.plugin_cache.get_plugin(name, kind, *args, **kwargs)
def get_instruments(self, target):
instruments = []
for name in self.enabled_instruments:
try:
instruments.append(self.get_plugin(name, kind='instrument',
target=target))
except NotFoundError:
msg = 'Instrument "{}" not found'
raise NotFoundError(msg.format(name))
return instruments
def get_processors(self):
processors = []
for name in self.enabled_processors:
try:
proc = self.plugin_cache.get_plugin(name, kind='output_processor')
except NotFoundError:
msg = 'Output Processor "{}" not found'
raise NotFoundError(msg.format(name))
processors.append(proc)
return processors
def finalize(self):
if not self.agenda:
msg = 'Attempting to finalize config before agenda has been set'
raise RuntimeError(msg)
self.run_config.merge_device_config(self.plugin_cache)
return CombinedConfig(self.settings, self.run_config)
def generate_jobs(self, context):
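        # Expand every job spec into (spec, iteration) pairs, ordered according to
        # run_config.execution_order (see permute_map at module level).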
job_specs = self.jobs_config.generate_job_specs(context.tm)
exec_order = self.run_config.execution_order
log.indent()
for spec, i in permute_iterations(job_specs, exec_order):
job = Job(spec, i, context)
job.load(context.tm.target)
self._jobs.append(job)
context.run_state.add_job(job)
log.dedent()
self._jobs_generated = True
def permute_by_job(specs):
"""
This is that "classic" implementation that executes all iterations of a
workload spec before proceeding onto the next spec.
"""
for spec in specs:
for i in range(1, spec.iterations + 1):
yield (spec, i)
def permute_by_iteration(specs):
"""
Runs the first iteration for all benchmarks first, before proceeding to the
    next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1, B2,
C1, C2...
    If multiple sections were specified in the agenda, this will run all
sections for the first global spec first, followed by all sections for the
second spec, etc.
e.g. given sections X and Y, and global specs A and B, with 2 iterations,
this will run
X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
"""
groups = [list(g) for k, g in groupby(specs, lambda s: s.workload_id)]
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
if t is not None:
yield t
def permute_by_section(specs):
"""
Runs the first iteration for all benchmarks first, before proceeding to the
    next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A2, B1, B2,
C1, C2...
    If multiple sections were specified in the agenda, this will run all specs
    for the first section followed by all specs for the second section, etc.
e.g. given sections X and Y, and global specs A and B, with 2 iterations,
this will run
X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
"""
groups = [list(g) for k, g in groupby(specs, lambda s: s.section_id)]
all_tuples = []
for spec in chain(*groups):
all_tuples.append([(spec, i + 1)
for i in xrange(spec.iterations)])
for t in chain(*map(list, izip_longest(*all_tuples))):
if t is not None:
yield t
def permute_randomly(specs):
"""
This will generate a random permutation of specs/iteration tuples.
"""
result = []
for spec in specs:
for i in xrange(1, spec.iterations + 1):
result.append((spec, i))
random.shuffle(result)
for t in result:
yield t
permute_map = {
'by_iteration': permute_by_iteration,
'by_job': permute_by_job,
'by_section': permute_by_section,
'random': permute_randomly,
}
def permute_iterations(specs, exec_order):
if exec_order not in permute_map:
msg = 'Unknown execution order "{}"; must be in: {}'
raise ValueError(msg.format(exec_order, permute_map.keys()))
return permute_map[exec_order](specs)
| 0.577853 | 0.120258 |
import apps.basics.op_drf.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('permission', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Api',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', apps.basics.op_drf.fields.DescriptionField(blank=True, default='', help_text='描述', null=True, verbose_name='描述')),
('modifier', apps.basics.op_drf.fields.ModifierCharField(blank=True, help_text='该记录最后修改者', max_length=255, null=True, verbose_name='修改者')),
('dept_belong_id', models.CharField(blank=True, max_length=64, null=True, verbose_name='数据归属部门')),
('update_datetime', apps.basics.op_drf.fields.UpdateDateTimeField(auto_now=True, help_text='修改时间', null=True, verbose_name='修改时间')),
('create_datetime', apps.basics.op_drf.fields.CreateDateTimeField(auto_now_add=True, help_text='创建时间', null=True, verbose_name='创建时间')),
('name', models.CharField(default=str, max_length=100, verbose_name='接口名称')),
('url', models.CharField(default=str, max_length=200, verbose_name='请求url')),
('headers', models.TextField(blank=True, default=dict, null=True, verbose_name='请求头信息')),
('params', models.TextField(blank=True, default=dict, null=True, verbose_name='请求参数')),
('validators', models.TextField(blank=True, default=list, null=True, verbose_name='验证器')),
('extractors', models.TextField(blank=True, default=list, null=True, verbose_name='提取器')),
('desc', models.CharField(blank=True, default=str, max_length=200, null=True, verbose_name='接口描述')),
('last_exe_status', models.IntegerField(choices=[(0, '未执行'), (1, '成功'), (2, '失败'), (3, '阻塞'), (4, '部分失败')], default=0, verbose_name='最后执行状态')),
('status', models.IntegerField(choices=[(0, '禁用'), (1, '正常'), (2, '仅自己可见')], default=1, verbose_name='状态')),
('creator', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_query_name='creator_query', to=settings.AUTH_USER_MODEL, verbose_name='创建者')),
],
options={
'verbose_name_plural': '接口',
},
),
migrations.CreateModel(
name='Suite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', apps.basics.op_drf.fields.DescriptionField(blank=True, default='', help_text='描述', null=True, verbose_name='描述')),
('modifier', apps.basics.op_drf.fields.ModifierCharField(blank=True, help_text='该记录最后修改者', max_length=255, null=True, verbose_name='修改者')),
('dept_belong_id', models.CharField(blank=True, max_length=64, null=True, verbose_name='数据归属部门')),
('update_datetime', apps.basics.op_drf.fields.UpdateDateTimeField(auto_now=True, help_text='修改时间', null=True, verbose_name='修改时间')),
('create_datetime', apps.basics.op_drf.fields.CreateDateTimeField(auto_now_add=True, help_text='创建时间', null=True, verbose_name='创建时间')),
('name', models.CharField(default=str, max_length=100, verbose_name='用例集名称')),
('status', models.IntegerField(choices=[(0, '禁用'), (1, '正常'), (2, '仅自己可见')], default=1, verbose_name='状态')),
('apis', models.ManyToManyField(to='api.Api', verbose_name='接口')),
('creator', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_query_name='creator_query', to=settings.AUTH_USER_MODEL, verbose_name='创建者')),
('dept', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_suite_dept', to='permission.Dept', verbose_name='模块分类')),
],
options={
'verbose_name_plural': '接口集',
},
),
]
|
backend/apps/projects/api/migrations/0001_initial.py
|
import apps.basics.op_drf.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('permission', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Api',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', apps.basics.op_drf.fields.DescriptionField(blank=True, default='', help_text='描述', null=True, verbose_name='描述')),
('modifier', apps.basics.op_drf.fields.ModifierCharField(blank=True, help_text='该记录最后修改者', max_length=255, null=True, verbose_name='修改者')),
('dept_belong_id', models.CharField(blank=True, max_length=64, null=True, verbose_name='数据归属部门')),
('update_datetime', apps.basics.op_drf.fields.UpdateDateTimeField(auto_now=True, help_text='修改时间', null=True, verbose_name='修改时间')),
('create_datetime', apps.basics.op_drf.fields.CreateDateTimeField(auto_now_add=True, help_text='创建时间', null=True, verbose_name='创建时间')),
('name', models.CharField(default=str, max_length=100, verbose_name='接口名称')),
('url', models.CharField(default=str, max_length=200, verbose_name='请求url')),
('headers', models.TextField(blank=True, default=dict, null=True, verbose_name='请求头信息')),
('params', models.TextField(blank=True, default=dict, null=True, verbose_name='请求参数')),
('validators', models.TextField(blank=True, default=list, null=True, verbose_name='验证器')),
('extractors', models.TextField(blank=True, default=list, null=True, verbose_name='提取器')),
('desc', models.CharField(blank=True, default=str, max_length=200, null=True, verbose_name='接口描述')),
('last_exe_status', models.IntegerField(choices=[(0, '未执行'), (1, '成功'), (2, '失败'), (3, '阻塞'), (4, '部分失败')], default=0, verbose_name='最后执行状态')),
('status', models.IntegerField(choices=[(0, '禁用'), (1, '正常'), (2, '仅自己可见')], default=1, verbose_name='状态')),
('creator', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_query_name='creator_query', to=settings.AUTH_USER_MODEL, verbose_name='创建者')),
],
options={
'verbose_name_plural': '接口',
},
),
migrations.CreateModel(
name='Suite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', apps.basics.op_drf.fields.DescriptionField(blank=True, default='', help_text='描述', null=True, verbose_name='描述')),
('modifier', apps.basics.op_drf.fields.ModifierCharField(blank=True, help_text='该记录最后修改者', max_length=255, null=True, verbose_name='修改者')),
('dept_belong_id', models.CharField(blank=True, max_length=64, null=True, verbose_name='数据归属部门')),
('update_datetime', apps.basics.op_drf.fields.UpdateDateTimeField(auto_now=True, help_text='修改时间', null=True, verbose_name='修改时间')),
('create_datetime', apps.basics.op_drf.fields.CreateDateTimeField(auto_now_add=True, help_text='创建时间', null=True, verbose_name='创建时间')),
('name', models.CharField(default=str, max_length=100, verbose_name='用例集名称')),
('status', models.IntegerField(choices=[(0, '禁用'), (1, '正常'), (2, '仅自己可见')], default=1, verbose_name='状态')),
('apis', models.ManyToManyField(to='api.Api', verbose_name='接口')),
('creator', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_query_name='creator_query', to=settings.AUTH_USER_MODEL, verbose_name='创建者')),
('dept', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_suite_dept', to='permission.Dept', verbose_name='模块分类')),
],
options={
'verbose_name_plural': '接口集',
},
),
]
| 0.385837 | 0.217504 |
from queue import PriorityQueue
Coordinate = tuple[int, int]
map: list[list[int]] = []
PART_TWO = True
# For each position, we store the lowest cost path to get there.
lowest_cost: list[list[None | tuple[int, list[Coordinate]]]] = []
with open('2021-12-15.txt') as f:
for line in (l.strip() for l in f):
map_values = [int(x) for x in line]
if PART_TWO:
for i in range(1,5):
map_values += [(int(x)+i) for x in line]
map.append(map_values)
lowest_cost.append([None] * len(map_values))
if PART_TWO:
# Expand map 4 times below.
orig_map_len = len(map)
for i in range(1,5):
for y in range(orig_map_len):
map.append([(x+i) for x in map[y]])
lowest_cost.append([None] * len(map[0]))
    # Deal with overflows: values reach at most 9+8=17, so subtracting 9 once restores the 1-9 range.
for y in range(len(map)):
for x in range(len(map[y])):
if map[y][x] > 9:
map[y][x] -= 9
# Priority queue always draws the current lowest cost path
work_queue: PriorityQueue[tuple[int,Coordinate, list[Coordinate]]] = PriorityQueue()
work_queue.put_nowait((0,(0,0),[(0,0)]))
NEIGHBORS = ((-1, 0), (1, 0), (0, 1), (0, -1))
max_y, max_x = len(map)-1, len(map[0])-1
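# Essentially Dijkstra's algorithm on the grid: always expand the cheapest queued cell,
# relax its four neighbours, and re-queue any neighbour that is reached more cheaply.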
while not work_queue.empty():
cost, (x, y), path = work_queue.get_nowait()
if lowest_cost[max_y][max_x] is not None:
if lowest_cost[max_y][max_x][0] < cost:
# Drain task if there is already a cheaper way to reach the end.
work_queue.task_done()
break
if lowest_cost[y][x] is not None and lowest_cost[y][x][0] < cost:
work_queue.task_done()
continue
lowest_cost[y][x] = (cost, path)
for dx, dy in NEIGHBORS:
nx, ny = x+dx, y+dy
# Skip out of bounds
if min(nx, ny) < 0 or ny > max_y or nx > max_x:
continue
new_cost = cost + map[ny][nx]
new_path = path + [(nx, ny)]
# Skip unless we're getting there cheaper.
if lowest_cost[ny][nx] is not None:
if lowest_cost[ny][nx][0] <= new_cost:
continue
# NOT THREAD SAFE: Per cell threading.Lock on lowest_cost cells would fix.
lowest_cost[ny][nx] = (new_cost, new_path)
work_queue.put_nowait((new_cost, (nx, ny), new_path))
work_queue.task_done()
print(lowest_cost[max_y][max_x])
print(lowest_cost[max_y][max_x][0])
|
2021/2021-12-15.py
|
from queue import PriorityQueue
Coordinate = tuple[int, int]
map: list[list[int]] = []
PART_TWO = True
# For each position, we store the lowest cost path to get there.
lowest_cost: list[list[None | tuple[int, list[Coordinate]]]] = []
with open('2021-12-15.txt') as f:
for line in (l.strip() for l in f):
map_values = [int(x) for x in line]
if PART_TWO:
for i in range(1,5):
map_values += [(int(x)+i) for x in line]
map.append(map_values)
lowest_cost.append([None] * len(map_values))
if PART_TWO:
# Expand map 4 times below.
orig_map_len = len(map)
for i in range(1,5):
for y in range(orig_map_len):
map.append([(x+i) for x in map[y]])
lowest_cost.append([None] * len(map[0]))
    # Deal with overflows: values reach at most 9+8=17, so subtracting 9 once restores the 1-9 range.
for y in range(len(map)):
for x in range(len(map[y])):
if map[y][x] > 9:
map[y][x] -= 9
# Priority queue always draws the current lowest cost path
work_queue: PriorityQueue[tuple[int,Coordinate, list[Coordinate]]] = PriorityQueue()
work_queue.put_nowait((0,(0,0),[(0,0)]))
NEIGHBORS = ((-1, 0), (1, 0), (0, 1), (0, -1))
max_y, max_x = len(map)-1, len(map[0])-1
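# Essentially Dijkstra's algorithm on the grid: always expand the cheapest queued cell,
# relax its four neighbours, and re-queue any neighbour that is reached more cheaply.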
while not work_queue.empty():
cost, (x, y), path = work_queue.get_nowait()
if lowest_cost[max_y][max_x] is not None:
if lowest_cost[max_y][max_x][0] < cost:
# Drain task if there is already a cheaper way to reach the end.
work_queue.task_done()
break
if lowest_cost[y][x] is not None and lowest_cost[y][x][0] < cost:
work_queue.task_done()
continue
lowest_cost[y][x] = (cost, path)
for dx, dy in NEIGHBORS:
nx, ny = x+dx, y+dy
# Skip out of bounds
if min(nx, ny) < 0 or ny > max_y or nx > max_x:
continue
new_cost = cost + map[ny][nx]
new_path = path + [(nx, ny)]
# Skip unless we're getting there cheaper.
if lowest_cost[ny][nx] is not None:
if lowest_cost[ny][nx][0] <= new_cost:
continue
# NOT THREAD SAFE: Per cell threading.Lock on lowest_cost cells would fix.
lowest_cost[ny][nx] = (new_cost, new_path)
work_queue.put_nowait((new_cost, (nx, ny), new_path))
work_queue.task_done()
print(lowest_cost[max_y][max_x])
print(lowest_cost[max_y][max_x][0])
| 0.302082 | 0.367015 |
import itertools
import pytest
from multpersist import OrderNotFound, compute_mp_order, \
efficient_candidate_generator, find_max_order, find_next, \
find_with_order, infinite_candidate_generator, is_in_order
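# These tests treat compute_mp_order(n) as the multiplicative persistence of n: the number
# of digit-product steps needed to reach a single digit (e.g. 39 -> 27 -> 14 -> 4 gives 3).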
def predetermined_number_generator():
for x in [10, 18, 237, 2777778888899, 277777788888899]:
yield x
def ranged_generator(start, stop):
for x in list(range(start, stop)):
s = str(x)
if '5' in s:
            continue
elif not is_in_order(s):
            continue
else:
yield x
def test_is_in_order():
assert is_in_order(str(11))
assert is_in_order(str(123))
assert not is_in_order(str(321))
def test_ranged_generator():
count = 0
for xl in ranged_generator(10, 999):
count += 1
assert count == 155
def test_efficient_candidate_generator():
count = 0
for xl in efficient_candidate_generator(10, 999):
count += 1
assert count == 155
def test_compute_order():
assert compute_mp_order(10) == 1
assert compute_mp_order(18) == 1
assert compute_mp_order(25) == 2
assert compute_mp_order(237) == 2
assert compute_mp_order(2777778888899) == 3
assert compute_mp_order(277777788888899) == 11
def test_find_order():
assert find_max_order(predetermined_number_generator) == \
(11, 277777788888899)
def test_largest_order_under_1e6():
def generator():
return efficient_candidate_generator(10, 1000000)
assert find_max_order(generator) == (7, 68889)
def test_largest_order_between_1e6_3e6():
def generator():
return efficient_candidate_generator(1000000, 10000000)
assert find_max_order(generator) == (8, 2677889)
def test_infinite_generator():
generator = infinite_candidate_generator(10)
items = list(itertools.islice(generator, 10))
assert items == [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
generator = infinite_candidate_generator(1000)
items = list(itertools.islice(generator, 10))
assert items == [1111, 1112, 1113, 1114, 1116, 1117, 1118, 1119, 1122, 1123]
def test_find_with_order_not_found():
def generator():
return predetermined_number_generator()
with pytest.raises(OrderNotFound):
find_with_order(generator, 10)
def test_find_smallest_with_order():
def generator():
return infinite_candidate_generator(10)
assert find_with_order(generator, 6) == (6, 6788)
assert find_with_order(generator, 7) == (7, 68889)
assert find_with_order(generator, 8) == (8, 2677889)
assert find_with_order(generator, 9) == (9, 26888999)
def test_find_next():
def generator():
return infinite_candidate_generator(10)
results = list(itertools.islice(find_next(generator, 1), 4))
assert results == [(1, 10), (2, 25), (3, 39), (4, 77)]
|
tests/test_compute_order.py
|
import itertools
import pytest
from multpersist import OrderNotFound, compute_mp_order, \
efficient_candidate_generator, find_max_order, find_next, \
find_with_order, infinite_candidate_generator, is_in_order
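# These tests treat compute_mp_order(n) as the multiplicative persistence of n: the number
# of digit-product steps needed to reach a single digit (e.g. 39 -> 27 -> 14 -> 4 gives 3).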
def predetermined_number_generator():
for x in [10, 18, 237, 2777778888899, 277777788888899]:
yield x
def ranged_generator(start, stop):
for x in list(range(start, stop)):
s = str(x)
if '5' in s:
            continue
elif not is_in_order(s):
            continue
else:
yield x
def test_is_in_order():
assert is_in_order(str(11))
assert is_in_order(str(123))
assert not is_in_order(str(321))
def test_ranged_generator():
count = 0
for xl in ranged_generator(10, 999):
count += 1
assert count == 155
def test_efficient_candidate_generator():
count = 0
for xl in efficient_candidate_generator(10, 999):
count += 1
assert count == 155
def test_compute_order():
assert compute_mp_order(10) == 1
assert compute_mp_order(18) == 1
assert compute_mp_order(25) == 2
assert compute_mp_order(237) == 2
assert compute_mp_order(2777778888899) == 3
assert compute_mp_order(277777788888899) == 11
def test_find_order():
assert find_max_order(predetermined_number_generator) == \
(11, 277777788888899)
def test_largest_order_under_1e6():
def generator():
return efficient_candidate_generator(10, 1000000)
assert find_max_order(generator) == (7, 68889)
def test_largest_order_between_1e6_3e6():
def generator():
return efficient_candidate_generator(1000000, 10000000)
assert find_max_order(generator) == (8, 2677889)
def test_infinite_generator():
generator = infinite_candidate_generator(10)
items = list(itertools.islice(generator, 10))
assert items == [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
generator = infinite_candidate_generator(1000)
items = list(itertools.islice(generator, 10))
assert items == [1111, 1112, 1113, 1114, 1116, 1117, 1118, 1119, 1122, 1123]
def test_find_with_order_not_found():
def generator():
return predetermined_number_generator()
with pytest.raises(OrderNotFound):
find_with_order(generator, 10)
def test_find_smallest_with_order():
def generator():
return infinite_candidate_generator(10)
assert find_with_order(generator, 6) == (6, 6788)
assert find_with_order(generator, 7) == (7, 68889)
assert find_with_order(generator, 8) == (8, 2677889)
assert find_with_order(generator, 9) == (9, 26888999)
def test_find_next():
def generator():
return infinite_candidate_generator(10)
results = list(itertools.islice(find_next(generator, 1), 4))
assert results == [(1, 10), (2, 25), (3, 39), (4, 77)]
| 0.426083 | 0.44059 |
from pprint import pformat
from six import iteritems
import re
class CouponDiscount(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, created=None, changed_by=None, updated=None, pricing_component=None, pricing_component_name=None, pricing_component_id=None, unit_of_measure_name=None, unit_of_measure_id=None, units_free=None, percentage_discount=None, cash_discount=None):
"""
CouponDiscount - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'created': 'datetime',
'changed_by': 'str',
'updated': 'datetime',
'pricing_component': 'str',
'pricing_component_name': 'str',
'pricing_component_id': 'str',
'unit_of_measure_name': 'str',
'unit_of_measure_id': 'str',
'units_free': 'int',
'percentage_discount': 'float',
'cash_discount': 'float'
}
self.attribute_map = {
'created': 'created',
'changed_by': 'changedBy',
'updated': 'updated',
'pricing_component': 'pricingComponent',
'pricing_component_name': 'pricingComponentName',
'pricing_component_id': 'pricingComponentID',
'unit_of_measure_name': 'unitOfMeasureName',
'unit_of_measure_id': 'unitOfMeasureID',
'units_free': 'unitsFree',
'percentage_discount': 'percentageDiscount',
'cash_discount': 'cashDiscount'
}
self._created = created
self._changed_by = changed_by
self._updated = updated
self._pricing_component = pricing_component
self._pricing_component_name = pricing_component_name
self._pricing_component_id = pricing_component_id
self._unit_of_measure_name = unit_of_measure_name
self._unit_of_measure_id = unit_of_measure_id
self._units_free = units_free
self._percentage_discount = percentage_discount
self._cash_discount = cash_discount
@property
def created(self):
"""
Gets the created of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:return: The created of this CouponDiscount.
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""
Sets the created of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:param created: The created of this CouponDiscount.
:type: datetime
"""
self._created = created
@property
def changed_by(self):
"""
Gets the changed_by of this CouponDiscount.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:return: The changed_by of this CouponDiscount.
:rtype: str
"""
return self._changed_by
@changed_by.setter
def changed_by(self, changed_by):
"""
Sets the changed_by of this CouponDiscount.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:param changed_by: The changed_by of this CouponDiscount.
:type: str
"""
self._changed_by = changed_by
@property
def updated(self):
"""
Gets the updated of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:return: The updated of this CouponDiscount.
:rtype: datetime
"""
return self._updated
@updated.setter
def updated(self, updated):
"""
Sets the updated of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:param updated: The updated of this CouponDiscount.
:type: datetime
"""
self._updated = updated
@property
def pricing_component(self):
"""
Gets the pricing_component of this CouponDiscount.
{ \"description\" : \"Name or ID of the pricing component to apply the discount to. If not set blank discount is applied at the invoice level.\", \"verbs\":[\"POST\"] }
:return: The pricing_component of this CouponDiscount.
:rtype: str
"""
return self._pricing_component
@pricing_component.setter
def pricing_component(self, pricing_component):
"""
Sets the pricing_component of this CouponDiscount.
{ \"description\" : \"Name or ID of the pricing component to apply the discount to. If not set blank discount is applied at the invoice level.\", \"verbs\":[\"POST\"] }
:param pricing_component: The pricing_component of this CouponDiscount.
:type: str
"""
self._pricing_component = pricing_component
@property
def pricing_component_name(self):
"""
Gets the pricing_component_name of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:return: The pricing_component_name of this CouponDiscount.
:rtype: str
"""
return self._pricing_component_name
@pricing_component_name.setter
def pricing_component_name(self, pricing_component_name):
"""
Sets the pricing_component_name of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:param pricing_component_name: The pricing_component_name of this CouponDiscount.
:type: str
"""
self._pricing_component_name = pricing_component_name
@property
def pricing_component_id(self):
"""
Gets the pricing_component_id of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:return: The pricing_component_id of this CouponDiscount.
:rtype: str
"""
return self._pricing_component_id
@pricing_component_id.setter
def pricing_component_id(self, pricing_component_id):
"""
Sets the pricing_component_id of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:param pricing_component_id: The pricing_component_id of this CouponDiscount.
:type: str
"""
self._pricing_component_id = pricing_component_id
@property
def unit_of_measure_name(self):
"""
Gets the unit_of_measure_name of this CouponDiscount.
:return: The unit_of_measure_name of this CouponDiscount.
:rtype: str
"""
return self._unit_of_measure_name
@unit_of_measure_name.setter
def unit_of_measure_name(self, unit_of_measure_name):
"""
Sets the unit_of_measure_name of this CouponDiscount.
:param unit_of_measure_name: The unit_of_measure_name of this CouponDiscount.
:type: str
"""
self._unit_of_measure_name = unit_of_measure_name
@property
def unit_of_measure_id(self):
"""
Gets the unit_of_measure_id of this CouponDiscount.
:return: The unit_of_measure_id of this CouponDiscount.
:rtype: str
"""
return self._unit_of_measure_id
@unit_of_measure_id.setter
def unit_of_measure_id(self, unit_of_measure_id):
"""
Sets the unit_of_measure_id of this CouponDiscount.
:param unit_of_measure_id: The unit_of_measure_id of this CouponDiscount.
:type: str
"""
self._unit_of_measure_id = unit_of_measure_id
@property
def units_free(self):
"""
Gets the units_free of this CouponDiscount.
{ \"description\" : \"Number of units that are free for a pricing-component.\", \"verbs\":[\"POST\",\"GET\"] }
:return: The units_free of this CouponDiscount.
:rtype: int
"""
return self._units_free
@units_free.setter
def units_free(self, units_free):
"""
Sets the units_free of this CouponDiscount.
{ \"description\" : \"Number of units that are free for a pricing-component.\", \"verbs\":[\"POST\",\"GET\"] }
:param units_free: The units_free of this CouponDiscount.
:type: int
"""
self._units_free = units_free
@property
def percentage_discount(self):
"""
Gets the percentage_discount of this CouponDiscount.
{ \"description\" : \"Percentage to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:return: The percentage_discount of this CouponDiscount.
:rtype: float
"""
return self._percentage_discount
@percentage_discount.setter
def percentage_discount(self, percentage_discount):
"""
Sets the percentage_discount of this CouponDiscount.
{ \"description\" : \"Percentage to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:param percentage_discount: The percentage_discount of this CouponDiscount.
:type: float
"""
self._percentage_discount = percentage_discount
@property
def cash_discount(self):
"""
Gets the cash_discount of this CouponDiscount.
{ \"description\" : \"Fixed monetary amount to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:return: The cash_discount of this CouponDiscount.
:rtype: float
"""
return self._cash_discount
@cash_discount.setter
def cash_discount(self, cash_discount):
"""
Sets the cash_discount of this CouponDiscount.
{ \"description\" : \"Fixed monetary amount to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:param cash_discount: The cash_discount of this CouponDiscount.
:type: float
"""
self._cash_discount = cash_discount
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
billforward/models/coupon_discount.py
|
from pprint import pformat
from six import iteritems
import re
class CouponDiscount(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, created=None, changed_by=None, updated=None, pricing_component=None, pricing_component_name=None, pricing_component_id=None, unit_of_measure_name=None, unit_of_measure_id=None, units_free=None, percentage_discount=None, cash_discount=None):
"""
CouponDiscount - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'created': 'datetime',
'changed_by': 'str',
'updated': 'datetime',
'pricing_component': 'str',
'pricing_component_name': 'str',
'pricing_component_id': 'str',
'unit_of_measure_name': 'str',
'unit_of_measure_id': 'str',
'units_free': 'int',
'percentage_discount': 'float',
'cash_discount': 'float'
}
self.attribute_map = {
'created': 'created',
'changed_by': 'changedBy',
'updated': 'updated',
'pricing_component': 'pricingComponent',
'pricing_component_name': 'pricingComponentName',
'pricing_component_id': 'pricingComponentID',
'unit_of_measure_name': 'unitOfMeasureName',
'unit_of_measure_id': 'unitOfMeasureID',
'units_free': 'unitsFree',
'percentage_discount': 'percentageDiscount',
'cash_discount': 'cashDiscount'
}
self._created = created
self._changed_by = changed_by
self._updated = updated
self._pricing_component = pricing_component
self._pricing_component_name = pricing_component_name
self._pricing_component_id = pricing_component_id
self._unit_of_measure_name = unit_of_measure_name
self._unit_of_measure_id = unit_of_measure_id
self._units_free = units_free
self._percentage_discount = percentage_discount
self._cash_discount = cash_discount
@property
def created(self):
"""
Gets the created of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:return: The created of this CouponDiscount.
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""
Sets the created of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:param created: The created of this CouponDiscount.
:type: datetime
"""
self._created = created
@property
def changed_by(self):
"""
Gets the changed_by of this CouponDiscount.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:return: The changed_by of this CouponDiscount.
:rtype: str
"""
return self._changed_by
@changed_by.setter
def changed_by(self, changed_by):
"""
Sets the changed_by of this CouponDiscount.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:param changed_by: The changed_by of this CouponDiscount.
:type: str
"""
self._changed_by = changed_by
@property
def updated(self):
"""
Gets the updated of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:return: The updated of this CouponDiscount.
:rtype: datetime
"""
return self._updated
@updated.setter
def updated(self, updated):
"""
Sets the updated of this CouponDiscount.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:param updated: The updated of this CouponDiscount.
:type: datetime
"""
self._updated = updated
@property
def pricing_component(self):
"""
Gets the pricing_component of this CouponDiscount.
{ \"description\" : \"Name or ID of the pricing component to apply the discount to. If not set blank discount is applied at the invoice level.\", \"verbs\":[\"POST\"] }
:return: The pricing_component of this CouponDiscount.
:rtype: str
"""
return self._pricing_component
@pricing_component.setter
def pricing_component(self, pricing_component):
"""
Sets the pricing_component of this CouponDiscount.
{ \"description\" : \"Name or ID of the pricing component to apply the discount to. If not set blank discount is applied at the invoice level.\", \"verbs\":[\"POST\"] }
:param pricing_component: The pricing_component of this CouponDiscount.
:type: str
"""
self._pricing_component = pricing_component
@property
def pricing_component_name(self):
"""
Gets the pricing_component_name of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:return: The pricing_component_name of this CouponDiscount.
:rtype: str
"""
return self._pricing_component_name
@pricing_component_name.setter
def pricing_component_name(self, pricing_component_name):
"""
Sets the pricing_component_name of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:param pricing_component_name: The pricing_component_name of this CouponDiscount.
:type: str
"""
self._pricing_component_name = pricing_component_name
@property
def pricing_component_id(self):
"""
Gets the pricing_component_id of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:return: The pricing_component_id of this CouponDiscount.
:rtype: str
"""
return self._pricing_component_id
@pricing_component_id.setter
def pricing_component_id(self, pricing_component_id):
"""
Sets the pricing_component_id of this CouponDiscount.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:param pricing_component_id: The pricing_component_id of this CouponDiscount.
:type: str
"""
self._pricing_component_id = pricing_component_id
@property
def unit_of_measure_name(self):
"""
Gets the unit_of_measure_name of this CouponDiscount.
:return: The unit_of_measure_name of this CouponDiscount.
:rtype: str
"""
return self._unit_of_measure_name
@unit_of_measure_name.setter
def unit_of_measure_name(self, unit_of_measure_name):
"""
Sets the unit_of_measure_name of this CouponDiscount.
:param unit_of_measure_name: The unit_of_measure_name of this CouponDiscount.
:type: str
"""
self._unit_of_measure_name = unit_of_measure_name
@property
def unit_of_measure_id(self):
"""
Gets the unit_of_measure_id of this CouponDiscount.
:return: The unit_of_measure_id of this CouponDiscount.
:rtype: str
"""
return self._unit_of_measure_id
@unit_of_measure_id.setter
def unit_of_measure_id(self, unit_of_measure_id):
"""
Sets the unit_of_measure_id of this CouponDiscount.
:param unit_of_measure_id: The unit_of_measure_id of this CouponDiscount.
:type: str
"""
self._unit_of_measure_id = unit_of_measure_id
@property
def units_free(self):
"""
Gets the units_free of this CouponDiscount.
{ \"description\" : \"Number of units that are free for a pricing-component.\", \"verbs\":[\"POST\",\"GET\"] }
:return: The units_free of this CouponDiscount.
:rtype: int
"""
return self._units_free
@units_free.setter
def units_free(self, units_free):
"""
Sets the units_free of this CouponDiscount.
{ \"description\" : \"Number of units that are free for a pricing-component.\", \"verbs\":[\"POST\",\"GET\"] }
:param units_free: The units_free of this CouponDiscount.
:type: int
"""
self._units_free = units_free
@property
def percentage_discount(self):
"""
Gets the percentage_discount of this CouponDiscount.
{ \"description\" : \"Percentage to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:return: The percentage_discount of this CouponDiscount.
:rtype: float
"""
return self._percentage_discount
@percentage_discount.setter
def percentage_discount(self, percentage_discount):
"""
Sets the percentage_discount of this CouponDiscount.
{ \"description\" : \"Percentage to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:param percentage_discount: The percentage_discount of this CouponDiscount.
:type: float
"""
self._percentage_discount = percentage_discount
@property
def cash_discount(self):
"""
Gets the cash_discount of this CouponDiscount.
{ \"description\" : \"Fixed monetary amount to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:return: The cash_discount of this CouponDiscount.
:rtype: float
"""
return self._cash_discount
@cash_discount.setter
def cash_discount(self, cash_discount):
"""
Sets the cash_discount of this CouponDiscount.
{ \"description\" : \"Fixed monetary amount to be discounted\", \"verbs\":[\"POST\",\"GET\"] }
:param cash_discount: The cash_discount of this CouponDiscount.
:type: float
"""
self._cash_discount = cash_discount
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
            if isinstance(value, list):
                # A list may hold nested models; convert each one recursively.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                # A nested model: delegate to its own to_dict().
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # A dict may hold nested models as values; convert those too.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Plain value (str, int, float, datetime, None): keep as is.
                result[attr] = value
return result
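    # Editor's note: to_dict() keys the result by the Python snake_case
    # attribute names (e.g. a hypothetical {'percentage_discount': 15.0}),
    # not by the camelCase keys in attribute_map; translating to the API's
    # wire format is typically left to the generated API client.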
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, CouponDiscount):
            return False
        return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
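# --- Usage sketch (editor's addition, not part of the generated module) ---
# A minimal, hypothetical example of building a CouponDiscount and dumping it
# with the to_dict() defined above; the field values are made up.
if __name__ == "__main__":
    discount = CouponDiscount(
        pricing_component="bandwidth",  # hypothetical pricing component name
        percentage_discount=15.0,       # 15% off that component
        units_free=0,
    )
    # Prints a dict keyed by the snake_case attribute names.
    print(discount.to_dict())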
| 0.757884 | 0.188567 |