identifier (string, 0-89 chars) | parameters (string, 0-399 chars) | return_statement (string, 0-982 chars, nullable) | docstring (string, 10-3.04k chars) | docstring_summary (string, 0-3.04k chars) | function (string, 13-25.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | argument_list (null) | language (string, 3 classes) | docstring_language (string, 4 classes) | docstring_language_predictions (string, 4 classes) | is_langid_reliable (string, 2 classes) | is_langid_extra_reliable (bool, 1 class) | type (string, 9 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Monat.anzahl_tage | (self) | return self.__anzahl_tage | Die Anzahl Tage im Monat in einem nicht Schaltjahr | Die Anzahl Tage im Monat in einem nicht Schaltjahr | def anzahl_tage(self) -> AnzahlTageImMonat:
"""Die Anzahl Tage im Monat in einem nicht Schaltjahr"""
return self.__anzahl_tage | [
"def",
"anzahl_tage",
"(",
"self",
")",
"->",
"AnzahlTageImMonat",
":",
"return",
"self",
".",
"__anzahl_tage"
] | [
70,
4
] | [
72,
33
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
random_value | (values: dict, data: StepData) | Wählt random einen Wert aus einem Array oder einem Dictionary (zu einem bestimmten Key) aus.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Wählt random einen Wert aus einem Array oder einem Dictionary (zu einem bestimmten Key) aus. | def random_value(values: dict, data: StepData):
"""Wählt random einen Wert aus einem Array oder einem Dictionary (zu einem bestimmten Key) aus.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
if "array" in values:
for key in values["new_keys"]:
array = data.get_data(values["array"], values, list)
length = len(array)
rand = randint(0, length - 1)
new_value = data.format(array[rand], values)
data.insert_data(key, new_value, values)
elif "dict" in values:
for idx, key in data.loop_key(values["keys"], values):
new_key = get_new_keys(values, idx)
new_values = data.get_data(values.get("dict", None), values, dict)
value = str(data.get_data(key, values))
length = len(new_values[value])
rand = randint(0, length - 1)
new_value = data.format(new_values[value][rand], values)
data.insert_data(new_key, new_value, values) | [
"def",
"random_value",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"if",
"\"array\"",
"in",
"values",
":",
"for",
"key",
"in",
"values",
"[",
"\"new_keys\"",
"]",
":",
"array",
"=",
"data",
".",
"get_data",
"(",
"values",
"[",
"\"array\"",
"]",
",",
"values",
",",
"list",
")",
"length",
"=",
"len",
"(",
"array",
")",
"rand",
"=",
"randint",
"(",
"0",
",",
"length",
"-",
"1",
")",
"new_value",
"=",
"data",
".",
"format",
"(",
"array",
"[",
"rand",
"]",
",",
"values",
")",
"data",
".",
"insert_data",
"(",
"key",
",",
"new_value",
",",
"values",
")",
"elif",
"\"dict\"",
"in",
"values",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"new_values",
"=",
"data",
".",
"get_data",
"(",
"values",
".",
"get",
"(",
"\"dict\"",
",",
"None",
")",
",",
"values",
",",
"dict",
")",
"value",
"=",
"str",
"(",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
")",
"length",
"=",
"len",
"(",
"new_values",
"[",
"value",
"]",
")",
"rand",
"=",
"randint",
"(",
"0",
",",
"length",
"-",
"1",
")",
"new_value",
"=",
"data",
".",
"format",
"(",
"new_values",
"[",
"value",
"]",
"[",
"rand",
"]",
",",
"values",
")",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
476,
0
] | [
499,
56
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
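The `random_value` step above chooses a random entry either from an array or from a dictionary bucket selected by another key, and writes the result back through the project's `StepData` helper. As a rough standalone sketch of that selection logic only (the `StepData`/`get_new_keys` machinery is assumed and not reproduced; plain dicts stand in for it):

```python
import random

def pick_random(values: dict, source: dict) -> dict:
    """Minimal stand-in for the array/dict branches of random_value.

    `values` mimics the step configuration from the JSON file, `source`
    mimics the already-fetched API data. Illustrative only, not the
    project's StepData-based API.
    """
    result = {}
    if "array" in values:
        array = source[values["array"]]
        for key in values.get("new_keys", []):
            result[key] = random.choice(array)       # same effect as randint(0, len-1) indexing
    elif "dict" in values:
        mapping = source[values["dict"]]
        for key, new_key in zip(values["keys"], values["new_keys"]):
            bucket = mapping[str(source[key])]       # choose the bucket by the looked-up value
            result[new_key] = random.choice(bucket)
    return result

# Example: pick a random phrase for the detected weather condition.
config = {"dict": "greetings", "keys": ["condition"], "new_keys": ["greeting"]}
data = {"condition": "rain", "greetings": {"rain": ["Schirm nicht vergessen!", "Bleib trocken!"]}}
print(pick_random(config, data))
```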
|
PdfGenerator.image | (self, image: [str, io.BytesIO], area:dict={}, attrs:dict={}, render=None, imageType="png") | return element_html | Bild an der aktuellen Cursor Position oder wenn angegeben bei x, y einfügen.
Internen Cursor auf die neuen x,y Positionen setzen
Das Image wird wenn möglich rechts eingefügt.
Parameters
----------
image : str|io.BytesIO
Das eigentliche Bild
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
imageType : str
png
svg+xml
Returns
-------
element_html: str
HTML des erzeugten Elements
.. code::
<img src="data:image/png;base64,{{image}}" />
oder
<img src="file://{{image}}" />
| Bild an der aktuellen Cursor Position oder wenn angegeben bei x, y einfügen. | def image(self, image: [str, io.BytesIO], area:dict={}, attrs:dict={}, render=None, imageType="png"):
"""Bild an der aktuellen Cursor Position oder wenn angegeben bei x, y einfügen.
Internen Cursor auf die neuen x,y Positionen setzen
Das Image wird wenn möglich rechts eingefügt.
Parameters
----------
image : str|io.BytesIO
Das eigentliche Bild
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
imageType : str
png
svg+xml
Returns
-------
element_html: str
HTML des erzeugten Elements
.. code::
<img src="data:image/png;base64,{{image}}" />
oder
<img src="file://{{image}}" />
"""
if render == None:
render = self.autoRender
element_html = ""
# Eigenschaften des Elements
if not "font-size" in attrs:
attrs["font-size"] = "8pt"
_id, _class, _style = self._get_attrs( attrs )
_area = self._get_area_style( area )
if isinstance(image, io.BytesIO):
# Bild steckt in image
image.seek(0)
context = base64.b64encode( image.getvalue() ).decode("ascii")
element_html = '\n\t<img class="image {_class}" style="{_style} {_area}" src="data:image/{_type};base64,{content}" />'.format(
_class = _class,
_style = _style,
_area=_area,
content=context,
_type=imageType
)
elif type(image) is str:
# Image liegt als Datei vor
if image[0] == "/":
# absolute Angabe verwenden
filepath = image
else:
# aus resources
filepath = osp.join(self._variables["resources"], image )
# gibt es die Datei dann einbinden
if osp.exists( filepath ):
element_html = '\n\t<img class="image {_class}" style="{_style} {_area}" src="file://{filepath}"></>'.format(
_class = _class,
_style = _style,
_area=_area,
filepath=filepath
)
if render:
self._html(element_html)
return element_html | [
"def",
"image",
"(",
"self",
",",
"image",
":",
"[",
"str",
",",
"io",
".",
"BytesIO",
"]",
",",
"area",
":",
"dict",
"=",
"{",
"}",
",",
"attrs",
":",
"dict",
"=",
"{",
"}",
",",
"render",
"=",
"None",
",",
"imageType",
"=",
"\"png\"",
")",
":",
"if",
"render",
"==",
"None",
":",
"render",
"=",
"self",
".",
"autoRender",
"element_html",
"=",
"\"\"",
"# Eigenschaften des Elements",
"if",
"not",
"\"font-size\"",
"in",
"attrs",
":",
"attrs",
"[",
"\"font-size\"",
"]",
"=",
"\"8pt\"",
"_id",
",",
"_class",
",",
"_style",
"=",
"self",
".",
"_get_attrs",
"(",
"attrs",
")",
"_area",
"=",
"self",
".",
"_get_area_style",
"(",
"area",
")",
"if",
"isinstance",
"(",
"image",
",",
"io",
".",
"BytesIO",
")",
":",
"# Bild steckt in image",
"image",
".",
"seek",
"(",
"0",
")",
"context",
"=",
"base64",
".",
"b64encode",
"(",
"image",
".",
"getvalue",
"(",
")",
")",
".",
"decode",
"(",
"\"ascii\"",
")",
"element_html",
"=",
"'\\n\\t<img class=\"image {_class}\" style=\"{_style} {_area}\" src=\"data:image/{_type};base64,{content}\" />'",
".",
"format",
"(",
"_class",
"=",
"_class",
",",
"_style",
"=",
"_style",
",",
"_area",
"=",
"_area",
",",
"content",
"=",
"context",
",",
"_type",
"=",
"imageType",
")",
"elif",
"type",
"(",
"image",
")",
"is",
"str",
":",
"# Image liegt als Datei vor",
"if",
"image",
"[",
"0",
"]",
"==",
"\"/\"",
":",
"# absolute Angabe verwenden",
"filepath",
"=",
"image",
"else",
":",
"# aus resources",
"filepath",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"_variables",
"[",
"\"resources\"",
"]",
",",
"image",
")",
"# gibt es die Datei dann einbinden",
"if",
"osp",
".",
"exists",
"(",
"filepath",
")",
":",
"element_html",
"=",
"'\\n\\t<img class=\"image {_class}\" style=\"{_style} {_area}\" src=\"file://{filepath}\"></>'",
".",
"format",
"(",
"_class",
"=",
"_class",
",",
"_style",
"=",
"_style",
",",
"_area",
"=",
"_area",
",",
"filepath",
"=",
"filepath",
")",
"if",
"render",
":",
"self",
".",
"_html",
"(",
"element_html",
")",
"return",
"element_html"
] | [
1065,
4
] | [
1143,
27
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
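`PdfGenerator.image` produces an `<img>` tag either from an in-memory `io.BytesIO` buffer, embedded as a base64 data URI, or from a file path via a `file://` URL. The encoding branch can be shown in isolation with the standard library; the class itself and its `_get_attrs`/`_get_area_style` helpers are project-specific and left out of this sketch:

```python
import base64
import io

def img_tag_from_buffer(buf: io.BytesIO, image_type: str = "png") -> str:
    """Encode an in-memory image as a base64 data URI, as the BytesIO branch does."""
    buf.seek(0)
    content = base64.b64encode(buf.getvalue()).decode("ascii")
    return '<img class="image" src="data:image/{t};base64,{c}" />'.format(t=image_type, c=content)

# A few fake bytes are enough to demonstrate the encoding path.
print(img_tag_from_buffer(io.BytesIO(b"\x89PNG")))
```

The file branch of the original works analogously but emits `src="file://{path}"` after checking that the resolved path exists.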
Arc.intersect_arc | (self, arc, rtol=1e-03, atol=1e-03, include_end=False) | return remaining_points | Von zwei Arc-Objekten werden die Schnittpunkte bestimmt und in
einer Liste ausgegeben.
| Von zwei Arc-Objekten werden die Schnittpunkte bestimmt und in
einer Liste ausgegeben.
| def intersect_arc(self, arc, rtol=1e-03, atol=1e-03, include_end=False):
""" Von zwei Arc-Objekten werden die Schnittpunkte bestimmt und in
einer Liste ausgegeben.
"""
assert(isinstance(arc, Arc))
points = self.intersect_circle(arc, rtol, atol, include_end)
# Check if the points are on a arc
# (has been assumed as a circle)
remaining_points = []
for p in points:
if arc.is_point_inside(p, rtol, atol, include_end):
remaining_points.append(p)
return remaining_points | [
"def",
"intersect_arc",
"(",
"self",
",",
"arc",
",",
"rtol",
"=",
"1e-03",
",",
"atol",
"=",
"1e-03",
",",
"include_end",
"=",
"False",
")",
":",
"assert",
"(",
"isinstance",
"(",
"arc",
",",
"Arc",
")",
")",
"points",
"=",
"self",
".",
"intersect_circle",
"(",
"arc",
",",
"rtol",
",",
"atol",
",",
"include_end",
")",
"# Check if the points are on a arc",
"# (has been assumed as a circle)",
"remaining_points",
"=",
"[",
"]",
"for",
"p",
"in",
"points",
":",
"if",
"arc",
".",
"is_point_inside",
"(",
"p",
",",
"rtol",
",",
"atol",
",",
"include_end",
")",
":",
"remaining_points",
".",
"append",
"(",
"p",
")",
"return",
"remaining_points"
] | [
694,
4
] | [
707,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
created | (values: dict, step_data: StepData) | Verwendet ein bereits erstelltes Bild als Thumbnail für das zu erstellende Video.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
| Verwendet ein bereits erstelltes Bild als Thumbnail für das zu erstellende Video. | def created(values: dict, step_data: StepData):
"""Verwendet ein bereits erstelltes Bild als Thumbnail für das zu erstellende Video.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
"""
src_file = values["images"][values["thumbnail"]["name"]]
_copy_and_rename(src_file, values, step_data) | [
"def",
"created",
"(",
"values",
":",
"dict",
",",
"step_data",
":",
"StepData",
")",
":",
"src_file",
"=",
"values",
"[",
"\"images\"",
"]",
"[",
"values",
"[",
"\"thumbnail\"",
"]",
"[",
"\"name\"",
"]",
"]",
"_copy_and_rename",
"(",
"src_file",
",",
"values",
",",
"step_data",
")"
] | [
63,
0
] | [
71,
49
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
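`created` looks up an already rendered image by the name configured under `thumbnail` and passes it to a copy-and-rename helper. A minimal stand-in for that helper, using only the standard library (the destination naming scheme here is an assumption, not the project's `_copy_and_rename`):

```python
import shutil
import tempfile
from pathlib import Path

def copy_as_thumbnail(src_file: str, out_dir: str, job_name: str) -> str:
    """Copy an existing image so it can serve as the video thumbnail."""
    dest = Path(out_dir) / f"{job_name}_thumbnail{Path(src_file).suffix}"  # hypothetical naming
    shutil.copyfile(src_file, dest)
    return str(dest)

# Demonstrate with a throwaway file in a temporary directory.
with tempfile.TemporaryDirectory() as tmp:
    src = Path(tmp) / "weather_today.png"
    src.write_bytes(b"\x89PNG")
    print(copy_as_thumbnail(str(src), tmp, "weather"))
```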
|
ispTest.test_config_mqtt | (self) | isp.config ispConfig mit MQTTHandler (isp.mqtt) prüfen immer mit neuen kernel für mqttInitLogging
| isp.config ispConfig mit MQTTHandler (isp.mqtt) prüfen immer mit neuen kernel für mqttInitLogging
| def test_config_mqtt(self):
'''isp.config ispConfig mit MQTTHandler (isp.mqtt) prüfen immer mit neuen kernel für mqttInitLogging
'''
# zuerst ohne parameter aufrufen
config = ispConfig( )
# __repr__ testen soll nicht die Klasse sondern die config selbst (dotmap) geben
self.assertEqual(
repr(config)[:7], 'DotMap(' , "Fehler beim laden __repr__")
# Magic Methods prüfen
self.assertEqual(
config.__dict__["_loadErrors"], [], "Fehler beim laden von _loadErrors")
self.assertEqual(
config._loadErrors, [], "__getitem__ Fehler bei vorhandenen _loadErrors im Object")
self.assertEqual(
type(config.test), dotmap.DotMap, "__getitem__ Fehler bei nicht vorhandenen in der config")
# __getattr__ wird bei nicht vorhandenen aufgerufen
self.assertEqual(
config._test, None, "__getitem__ Fehler bei nicht vorhandenen im Object")
# __getitem__
self.assertEqual(
config["_loadErrors"], [], "__getitem__ Fehler")
# __getitem__
self.assertEqual(
type(config["versions"]), dotmap.DotMap, "__getitem__ mit dotmap Fehler")
# __getattr__ mit dotmap (config Values)
self.assertEqual(
type(config.versions), dotmap.DotMap, "__getattr__ mit dotmap Fehler")
# __setitem__
config["_version"] = '2.unittest' # __setitem__
self.assertEqual(
config.__dict__["_version"], '2.unittest', "__setitem__ Fehler")
# __setitem__ mit dotmap (config Values)
config["unittest"] = '3.unittest' # __setitem__
self.assertEqual(
config.unittest, '3.unittest', "__setitem__ mit dotmap Fehler")
# __setattr__
config._version = '3.unittest' # __setattr__
self.assertEqual(
config.__dict__["_version"], '3.unittest', "__setattr__ Fehler")
# Zugiffe auf die config selbst
#
# komplette config als dict
self.assertEqual(
type( config.get() ), dict, "komplette config als dict")
# config get mit default
self.assertEqual(
config.get("gibtsnicht", "defaultValue"), 'defaultValue', "config get mit default")
# dotmap set oberste ebene
config._config["unittest"] = '4.unittest'
self.assertEqual(
config.get("unittest") , '4.unittest', "dotmap get auf erster ebene")
# dotmap set/get auf einer ebene
config._config.A.unittest = '4A.unittest'
self.assertEqual(
config.get("A.unittest") , '4A.unittest', "dotmap get auf zweiter ebene")
config._config.A.B.unittest = '4AB.unittest'
self.assertEqual(
config.get( ["A", "B", "unittest"] ) , '4AB.unittest', "dotmap get auf dritter ebene")
# dotmap set oberste ebene
config.set("5unittest", '5-unittest')
# dotmap get
self.assertEqual(
config.get("5unittest"), '5-unittest', "dotmap set auf erster ebene anlegen")
# dotmap set oberste ebene überschreiben
config.set("5unittest", '5a-unittest')
# dotmap get
self.assertEqual(
config.get("5unittest"), '5a-unittest', "dotmap set auf erster ebene ändern")
# dotmap set zweite ebene
config.set("B5.unittest", '5B-unittest')
# dotmap get
self.assertEqual(
config.get("B5.unittest"), '5B-unittest', "dotmap set auf zweiter ebene")
# dotmap set zweite ebene als list
config.set(["C5","unittest"], '5C-unittest')
# dotmap get
self.assertEqual(
config.get(["C5","unittest"]), '5C-unittest', "dotmap set/get auf zweiter ebene als list")
# dotmap set zweite ebene neues Element
config.set("B5.unittestA", '5B-unittest')
self.assertEqual(
config.get("B5").toDict(), {'unittest': '5B-unittest', 'unittestA': '5B-unittest'}, "dotmap set zweite ebene neues Element")
# hilfsfunktion dict_merge testen
a = {"A":1}
b = {"B":2}
c = dict_merge(a, b)
self.assertEqual(
c, {'A': 1, 'B': 2}, "dict_merge auch neue keys")
c = dict_merge(a, b, False)
self.assertEqual(
c, {'A': 1}, "dict_merge nur vorhandene keys")
# test in config setzen update prüfen
#
localtime = time.strftime("%Y%m%d %H:%M:%S.%f", time.localtime(time.time()) )
config.test = {"a":1, "time": localtime }
# a verändern
config.update( {
"test": {"a":2}
})
self.assertEqual(
config.test, {"a":2, "time": localtime }, "Fehler bei config update")
# ohne mqtt findet default logging statt (konsole)
# .. todo:: Konsole logger funktionen noch überprüfen
logger = logging.getLogger( "MQTT" )
logger.debug('logger.debug')
logger.info("logger.info")
logger.warning("logger.warning")
logger.error("logger.error")
# mqtt logging prüfen
#
if config.get("server.mqtt.host", "") == "":
print( "(MQTT) keine Angaben in config vorhanden. MQTT wird nicht getestet!")
return;
# config mit anderem mqttLevel
config = ispConfig( mqttlevel=30 )
mqtt = config.mqttGetHandler()
self.assertIsNotNone(
mqtt, "kein MQTT handler vorhanden")
results = {}
mqtt_event = threading.Event()
mqttResult = None
def onMqtt( msg ):
global mqttResult
# in results die empfangenen ablegen
mqttResult = msg
results[ msg["topic"] ] = msg["payload"]
mqtt_event.set()
# funktion bei signal aufrufen
mqtt.signal.connect( onMqtt )
def publishThread( args ):
global mqttResult
mqttResult = None
mqtt_event.clear()
# Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen
thread = threading.Thread( target=mqtt.publish, args=( args,) )
thread.start()
# max 2 sekunden oder auf mqtt_event aus onMqtt warten
while not mqtt_event.wait( timeout=3 ):
mqtt_event.set()
return mqttResult
# die eigenen script infos
result = publishThread({
"topic": "cmnd/status"
} )
self.assertEqual(
result["topic"], "stat/status", "Fehler bei cmnd/status abfrage")
# python process vorhanden?
result = publishThread({
"topic": "cmnd/process",
"payload" : "python"
} )
#print("----------------------cmnd/process", result )
self.assertEqual(
result["topic"], "stat/process", "Fehler bei process abfrage")
# publish ohne topic - publish wird nicht aufgerufen
# hier wird in publishThread auf timeout gewartet
result = publishThread({
"payload": "publish ohne topic - publish wird nicht aufgerufen"
})
self.assertIsNone(
result, "Fehler bei process abfrage")
# publish ohne payload - publish wird mit leerem payload aufgerufen
result = publishThread({
"topic": "cmnd/test/leer"
})
self.assertEqual(
result["payload"], "", "Fehler bei leerem payload")
# payload mit object - publish wird mit leerem payload aufgerufen nur (str, bytearray, int, float) ist ok
result = publishThread({
"topic": "cmnd/test/object",
"payload": object()
})
self.assertEqual(
result["payload"], "", "Fehler bei object payload")
# payload als Text
result = publishThread({
"topic": "cmnd/test/string",
"payload": "payload als Text"
})
self.assertEqual(
result["payload"], "payload als Text", "Fehler bei text payload")
# payload als dict
result = publishThread({
"topic": "cmnd/test/dict",
"payload": {"text":"payload als dict"}
})
self.assertEqual(
result["payload"], {"text":"payload als dict"}, "Fehler bei dict payload")
# mqtt.client.subscribe( "gqa_dev/logging/#" )
# mqtt funktionen über logger
logger = logging.getLogger( "MQTT" )
logger.setLevel( logging.DEBUG )
logger.send()
logger.send("test/publish")
logger.progressStart( "test" )
logger.progress( "test", 50 )
logger.progressReady( "test" )
# test über mqtt anstatt über sofort über logger
mqtt.logging = True
mqtt.info("config.info")
mqtt.warning("config.warning")
mqtt.error("config.error")
# .. todo:: config ohne mqtt Ausgabe auf der Konsole
config.mqttCleanup()
mqtt.info("config.info nach cleanup")
mqtt.warning("config.warning nach cleanup")
mqtt.error("config.error nach cleanup")
# config mit falschen mqtt Angaben
#
config = ispConfig( )
port = config._config.server.mqtt.port
config._config.server.mqtt.port = 111111
config.mqttInitLogger( cleanup=True )
mqtt = config.mqttGetHandler()
self.assertIsNone(
mqtt, "Trotz init Fehler MQTT handler vorhanden")
#mqtt.info("config.info nach Fehler bei MQTT config")
config._config.server.mqtt.port = port
config.mqttInitLogger( cleanup=True )
time.sleep(4) # Sleep for 2 seconds um mqtt zu empfangen
# hier gibt es keine Ausgaben, da mqtt nicht mehr da ist
logger.info("logger.info nach MQTT init Fehler")
logger.send("cmnd/test/publish", "nach MQTT init Fehler")
time.sleep(2) # Sleep for 2 seconds um logger mqtt zu empfangen
#print( results )
self.assertIn(
"cmnd/test/publish", results, "Fehler nach MQTT init Fehler")
#mqtt.publish({
# "topic": "cmnd/status"
#})
# mqtt in config schließen
config.mqttCleanup( ) | [
"def",
"test_config_mqtt",
"(",
"self",
")",
":",
"# zuerst ohne parameter aufrufen",
"config",
"=",
"ispConfig",
"(",
")",
"# __repr__ testen soll nicht die Klasse sondern die config selbst (dotmap) geben",
"self",
".",
"assertEqual",
"(",
"repr",
"(",
"config",
")",
"[",
":",
"7",
"]",
",",
"'DotMap('",
",",
"\"Fehler beim laden __repr__\"",
")",
"# Magic Methods prüfen",
"self",
".",
"assertEqual",
"(",
"config",
".",
"__dict__",
"[",
"\"_loadErrors\"",
"]",
",",
"[",
"]",
",",
"\"Fehler beim laden von _loadErrors\"",
")",
"self",
".",
"assertEqual",
"(",
"config",
".",
"_loadErrors",
",",
"[",
"]",
",",
"\"__getitem__ Fehler bei vorhandenen _loadErrors im Object\"",
")",
"self",
".",
"assertEqual",
"(",
"type",
"(",
"config",
".",
"test",
")",
",",
"dotmap",
".",
"DotMap",
",",
"\"__getitem__ Fehler bei nicht vorhandenen in der config\"",
")",
"# __getattr__ wird bei nicht vorhandenen aufgerufen",
"self",
".",
"assertEqual",
"(",
"config",
".",
"_test",
",",
"None",
",",
"\"__getitem__ Fehler bei nicht vorhandenen im Object\"",
")",
"# __getitem__",
"self",
".",
"assertEqual",
"(",
"config",
"[",
"\"_loadErrors\"",
"]",
",",
"[",
"]",
",",
"\"__getitem__ Fehler\"",
")",
"# __getitem__",
"self",
".",
"assertEqual",
"(",
"type",
"(",
"config",
"[",
"\"versions\"",
"]",
")",
",",
"dotmap",
".",
"DotMap",
",",
"\"__getitem__ mit dotmap Fehler\"",
")",
"# __getattr__ mit dotmap (config Values) ",
"self",
".",
"assertEqual",
"(",
"type",
"(",
"config",
".",
"versions",
")",
",",
"dotmap",
".",
"DotMap",
",",
"\"__getattr__ mit dotmap Fehler\"",
")",
"# __setitem__",
"config",
"[",
"\"_version\"",
"]",
"=",
"'2.unittest'",
"# __setitem__",
"self",
".",
"assertEqual",
"(",
"config",
".",
"__dict__",
"[",
"\"_version\"",
"]",
",",
"'2.unittest'",
",",
"\"__setitem__ Fehler\"",
")",
"# __setitem__ mit dotmap (config Values) ",
"config",
"[",
"\"unittest\"",
"]",
"=",
"'3.unittest'",
"# __setitem__",
"self",
".",
"assertEqual",
"(",
"config",
".",
"unittest",
",",
"'3.unittest'",
",",
"\"__setitem__ mit dotmap Fehler\"",
")",
"# __setattr__",
"config",
".",
"_version",
"=",
"'3.unittest'",
"# __setattr__",
"self",
".",
"assertEqual",
"(",
"config",
".",
"__dict__",
"[",
"\"_version\"",
"]",
",",
"'3.unittest'",
",",
"\"__setattr__ Fehler\"",
")",
"# Zugiffe auf die config selbst",
"#",
"# komplette config als dict",
"self",
".",
"assertEqual",
"(",
"type",
"(",
"config",
".",
"get",
"(",
")",
")",
",",
"dict",
",",
"\"komplette config als dict\"",
")",
"# config get mit default",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"gibtsnicht\"",
",",
"\"defaultValue\"",
")",
",",
"'defaultValue'",
",",
"\"config get mit default\"",
")",
"# dotmap set oberste ebene",
"config",
".",
"_config",
"[",
"\"unittest\"",
"]",
"=",
"'4.unittest'",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"unittest\"",
")",
",",
"'4.unittest'",
",",
"\"dotmap get auf erster ebene\"",
")",
"# dotmap set/get auf einer ebene",
"config",
".",
"_config",
".",
"A",
".",
"unittest",
"=",
"'4A.unittest'",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"A.unittest\"",
")",
",",
"'4A.unittest'",
",",
"\"dotmap get auf zweiter ebene\"",
")",
"config",
".",
"_config",
".",
"A",
".",
"B",
".",
"unittest",
"=",
"'4AB.unittest'",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"[",
"\"A\"",
",",
"\"B\"",
",",
"\"unittest\"",
"]",
")",
",",
"'4AB.unittest'",
",",
"\"dotmap get auf dritter ebene\"",
")",
"# dotmap set oberste ebene",
"config",
".",
"set",
"(",
"\"5unittest\"",
",",
"'5-unittest'",
")",
"# dotmap get",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"5unittest\"",
")",
",",
"'5-unittest'",
",",
"\"dotmap set auf erster ebene anlegen\"",
")",
"# dotmap set oberste ebene überschreiben",
"config",
".",
"set",
"(",
"\"5unittest\"",
",",
"'5a-unittest'",
")",
"# dotmap get",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"5unittest\"",
")",
",",
"'5a-unittest'",
",",
"\"dotmap set auf erster ebene ändern\")",
" ",
"# dotmap set zweite ebene",
"config",
".",
"set",
"(",
"\"B5.unittest\"",
",",
"'5B-unittest'",
")",
"# dotmap get",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"B5.unittest\"",
")",
",",
"'5B-unittest'",
",",
"\"dotmap set auf zweiter ebene\"",
")",
"# dotmap set zweite ebene als list",
"config",
".",
"set",
"(",
"[",
"\"C5\"",
",",
"\"unittest\"",
"]",
",",
"'5C-unittest'",
")",
"# dotmap get",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"[",
"\"C5\"",
",",
"\"unittest\"",
"]",
")",
",",
"'5C-unittest'",
",",
"\"dotmap set/get auf zweiter ebene als list\"",
")",
"# dotmap set zweite ebene neues Element",
"config",
".",
"set",
"(",
"\"B5.unittestA\"",
",",
"'5B-unittest'",
")",
"self",
".",
"assertEqual",
"(",
"config",
".",
"get",
"(",
"\"B5\"",
")",
".",
"toDict",
"(",
")",
",",
"{",
"'unittest'",
":",
"'5B-unittest'",
",",
"'unittestA'",
":",
"'5B-unittest'",
"}",
",",
"\"dotmap set zweite ebene neues Element\"",
")",
"# hilfsfunktion dict_merge testen",
"a",
"=",
"{",
"\"A\"",
":",
"1",
"}",
"b",
"=",
"{",
"\"B\"",
":",
"2",
"}",
"c",
"=",
"dict_merge",
"(",
"a",
",",
"b",
")",
"self",
".",
"assertEqual",
"(",
"c",
",",
"{",
"'A'",
":",
"1",
",",
"'B'",
":",
"2",
"}",
",",
"\"dict_merge auch neue keys\"",
")",
"c",
"=",
"dict_merge",
"(",
"a",
",",
"b",
",",
"False",
")",
"self",
".",
"assertEqual",
"(",
"c",
",",
"{",
"'A'",
":",
"1",
"}",
",",
"\"dict_merge nur vorhandene keys\"",
")",
"# test in config setzen update prüfen",
"#",
"localtime",
"=",
"time",
".",
"strftime",
"(",
"\"%Y%m%d %H:%M:%S.%f\"",
",",
"time",
".",
"localtime",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
"config",
".",
"test",
"=",
"{",
"\"a\"",
":",
"1",
",",
"\"time\"",
":",
"localtime",
"}",
"# a verändern",
"config",
".",
"update",
"(",
"{",
"\"test\"",
":",
"{",
"\"a\"",
":",
"2",
"}",
"}",
")",
"self",
".",
"assertEqual",
"(",
"config",
".",
"test",
",",
"{",
"\"a\"",
":",
"2",
",",
"\"time\"",
":",
"localtime",
"}",
",",
"\"Fehler bei config update\"",
")",
"# ohne mqtt findet default logging statt (konsole)",
"# .. todo:: Konsole logger funktionen noch überprüfen",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"\"MQTT\"",
")",
"logger",
".",
"debug",
"(",
"'logger.debug'",
")",
"logger",
".",
"info",
"(",
"\"logger.info\"",
")",
"logger",
".",
"warning",
"(",
"\"logger.warning\"",
")",
"logger",
".",
"error",
"(",
"\"logger.error\"",
")",
"# mqtt logging prüfen",
"#",
"if",
"config",
".",
"get",
"(",
"\"server.mqtt.host\"",
",",
"\"\"",
")",
"==",
"\"\"",
":",
"print",
"(",
"\"(MQTT) keine Angaben in config vorhanden. MQTT wird nicht getestet!\"",
")",
"return",
"# config mit anderem mqttLevel",
"config",
"=",
"ispConfig",
"(",
"mqttlevel",
"=",
"30",
")",
"mqtt",
"=",
"config",
".",
"mqttGetHandler",
"(",
")",
"self",
".",
"assertIsNotNone",
"(",
"mqtt",
",",
"\"kein MQTT handler vorhanden\"",
")",
"results",
"=",
"{",
"}",
"mqtt_event",
"=",
"threading",
".",
"Event",
"(",
")",
"mqttResult",
"=",
"None",
"def",
"onMqtt",
"(",
"msg",
")",
":",
"global",
"mqttResult",
"# in results die empfangenen ablegen",
"mqttResult",
"=",
"msg",
"results",
"[",
"msg",
"[",
"\"topic\"",
"]",
"]",
"=",
"msg",
"[",
"\"payload\"",
"]",
"mqtt_event",
".",
"set",
"(",
")",
"# funktion bei signal aufrufen",
"mqtt",
".",
"signal",
".",
"connect",
"(",
"onMqtt",
")",
"def",
"publishThread",
"(",
"args",
")",
":",
"global",
"mqttResult",
"mqttResult",
"=",
"None",
"mqtt_event",
".",
"clear",
"(",
")",
"# Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"mqtt",
".",
"publish",
",",
"args",
"=",
"(",
"args",
",",
")",
")",
"thread",
".",
"start",
"(",
")",
"# max 2 sekunden oder auf mqtt_event aus onMqtt warten ",
"while",
"not",
"mqtt_event",
".",
"wait",
"(",
"timeout",
"=",
"3",
")",
":",
"mqtt_event",
".",
"set",
"(",
")",
"return",
"mqttResult",
"# die eigenen script infos ",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/status\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"topic\"",
"]",
",",
"\"stat/status\"",
",",
"\"Fehler bei cmnd/status abfrage\"",
")",
"# python process vorhanden?",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/process\"",
",",
"\"payload\"",
":",
"\"python\"",
"}",
")",
"#print(\"----------------------cmnd/process\", result )",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"topic\"",
"]",
",",
"\"stat/process\"",
",",
"\"Fehler bei process abfrage\"",
")",
"# publish ohne topic - publish wird nicht aufgerufen",
"# hier wird in publishThread auf timeout gewartet",
"result",
"=",
"publishThread",
"(",
"{",
"\"payload\"",
":",
"\"publish ohne topic - publish wird nicht aufgerufen\"",
"}",
")",
"self",
".",
"assertIsNone",
"(",
"result",
",",
"\"Fehler bei process abfrage\"",
")",
"# publish ohne payload - publish wird mit leerem payload aufgerufen",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/test/leer\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"payload\"",
"]",
",",
"\"\"",
",",
"\"Fehler bei leerem payload\"",
")",
"# payload mit object - publish wird mit leerem payload aufgerufen nur (str, bytearray, int, float) ist ok",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/test/object\"",
",",
"\"payload\"",
":",
"object",
"(",
")",
"}",
")",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"payload\"",
"]",
",",
"\"\"",
",",
"\"Fehler bei object payload\"",
")",
"# payload als Text",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/test/string\"",
",",
"\"payload\"",
":",
"\"payload als Text\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"payload\"",
"]",
",",
"\"payload als Text\"",
",",
"\"Fehler bei text payload\"",
")",
"# payload als dict",
"result",
"=",
"publishThread",
"(",
"{",
"\"topic\"",
":",
"\"cmnd/test/dict\"",
",",
"\"payload\"",
":",
"{",
"\"text\"",
":",
"\"payload als dict\"",
"}",
"}",
")",
"self",
".",
"assertEqual",
"(",
"result",
"[",
"\"payload\"",
"]",
",",
"{",
"\"text\"",
":",
"\"payload als dict\"",
"}",
",",
"\"Fehler bei dict payload\"",
")",
"# mqtt.client.subscribe( \"gqa_dev/logging/#\" )",
"# mqtt funktionen über logger",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"\"MQTT\"",
")",
"logger",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"logger",
".",
"send",
"(",
")",
"logger",
".",
"send",
"(",
"\"test/publish\"",
")",
"logger",
".",
"progressStart",
"(",
"\"test\"",
")",
"logger",
".",
"progress",
"(",
"\"test\"",
",",
"50",
")",
"logger",
".",
"progressReady",
"(",
"\"test\"",
")",
"# test über mqtt anstatt über sofort über logger",
"mqtt",
".",
"logging",
"=",
"True",
"mqtt",
".",
"info",
"(",
"\"config.info\"",
")",
"mqtt",
".",
"warning",
"(",
"\"config.warning\"",
")",
"mqtt",
".",
"error",
"(",
"\"config.error\"",
")",
"# .. todo:: config ohne mqtt Ausgabe auf der Konsole",
"config",
".",
"mqttCleanup",
"(",
")",
"mqtt",
".",
"info",
"(",
"\"config.info nach cleanup\"",
")",
"mqtt",
".",
"warning",
"(",
"\"config.warning nach cleanup\"",
")",
"mqtt",
".",
"error",
"(",
"\"config.error nach cleanup\"",
")",
"# config mit falschen mqtt Angaben ",
"#",
"config",
"=",
"ispConfig",
"(",
")",
"port",
"=",
"config",
".",
"_config",
".",
"server",
".",
"mqtt",
".",
"port",
"config",
".",
"_config",
".",
"server",
".",
"mqtt",
".",
"port",
"=",
"111111",
"config",
".",
"mqttInitLogger",
"(",
"cleanup",
"=",
"True",
")",
"mqtt",
"=",
"config",
".",
"mqttGetHandler",
"(",
")",
"self",
".",
"assertIsNone",
"(",
"mqtt",
",",
"\"Trotz init Fehler MQTT handler vorhanden\"",
")",
"#mqtt.info(\"config.info nach Fehler bei MQTT config\")",
"config",
".",
"_config",
".",
"server",
".",
"mqtt",
".",
"port",
"=",
"port",
"config",
".",
"mqttInitLogger",
"(",
"cleanup",
"=",
"True",
")",
"time",
".",
"sleep",
"(",
"4",
")",
"# Sleep for 2 seconds um mqtt zu empfangen",
"# hier gibt es keine Ausgaben, da mqtt nicht mehr da ist",
"logger",
".",
"info",
"(",
"\"logger.info nach MQTT init Fehler\"",
")",
"logger",
".",
"send",
"(",
"\"cmnd/test/publish\"",
",",
"\"nach MQTT init Fehler\"",
")",
"time",
".",
"sleep",
"(",
"2",
")",
"# Sleep for 2 seconds um logger mqtt zu empfangen",
"#print( results )",
"self",
".",
"assertIn",
"(",
"\"cmnd/test/publish\"",
",",
"results",
",",
"\"Fehler nach MQTT init Fehler\"",
")",
"#mqtt.publish({",
"# \"topic\": \"cmnd/status\"",
"#})",
"# mqtt in config schließen",
"config",
".",
"mqttCleanup",
"(",
")"
] | [
951,
4
] | [
1257,
29
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
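A large part of this test exercises dotted-path access on the configuration, e.g. `config.get("A.unittest")`, `config.set("B5.unittest", ...)`, and paths given as lists. The underlying idea, walking nested dicts by a dotted key, can be sketched without `DotMap` or `ispConfig` (both are project classes assumed here):

```python
def get_path(tree: dict, path, default=None):
    """Resolve 'A.B.key' or ['A', 'B', 'key'] against nested dicts."""
    keys = path.split(".") if isinstance(path, str) else list(path)
    node = tree
    for key in keys:
        if not isinstance(node, dict) or key not in node:
            return default
        node = node[key]
    return node

def set_path(tree: dict, path, value) -> None:
    """Create intermediate dicts as needed and set the leaf value."""
    keys = path.split(".") if isinstance(path, str) else list(path)
    node = tree
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value

cfg = {}
set_path(cfg, "B5.unittest", "5B-unittest")
set_path(cfg, ["C5", "unittest"], "5C-unittest")
print(get_path(cfg, "B5.unittest"), get_path(cfg, ["C5", "unittest"]), get_path(cfg, "missing", "default"))
```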
|
regex | (values: dict, data: StepData) | Führt `"re.sub"` für die angegebenen Felder aus.
regex (suche nach dieser Expression, replace_by (ersetze Expression durch), value (String in dem ersetzt werden soll)
Geht nur für regex ohne backslash \
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Führt `"re.sub"` für die angegebenen Felder aus.
regex (suche nach dieser Expression, replace_by (ersetze Expression durch), value (String in dem ersetzt werden soll) | def regex(values: dict, data: StepData):
"""Führt `"re.sub"` für die angegebenen Felder aus.
regex (suche nach dieser Expression, replace_by (ersetze Expression durch), value (String in dem ersetzt werden soll)
Geht nur für regex ohne backslash \
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
new_key = get_new_keys(values, idx)
regex = data.format(values["regex"], values)
find = fr"{regex}"
replace_by = data.format(values["replace_by"], values)
new_value = re.sub(find, replace_by, value)
data.insert_data(new_key, new_value, values) | [
"def",
"regex",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"regex",
"=",
"data",
".",
"format",
"(",
"values",
"[",
"\"regex\"",
"]",
",",
"values",
")",
"find",
"=",
"fr\"{regex}\"",
"replace_by",
"=",
"data",
".",
"format",
"(",
"values",
"[",
"\"replace_by\"",
"]",
",",
"values",
")",
"new_value",
"=",
"re",
".",
"sub",
"(",
"find",
",",
"replace_by",
",",
"value",
")",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
258,
0
] | [
275,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
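The `regex` step formats a search pattern and a replacement from the step configuration and applies `re.sub` to every looped value; its docstring warns that patterns containing backslashes are not supported. Stripped of the `StepData` formatting layer (assumed, not reproduced), the core call is just:

```python
import re

def apply_regex(pattern: str, replace_by: str, value: str) -> str:
    """Same re.sub call as in the step, on plain strings."""
    return re.sub(fr"{pattern}", replace_by, value)

# Example without backslashes, as the step requires: expand a unit suffix.
print(apply_regex("°C", " Grad Celsius", "21°C"))  # -> "21 Grad Celsius"
```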
|
HeistSystem.game_NewPlayerData | (self, data) | return dictPlayerData | Neuer Spieler: Daten prüfen in einem Dictonary zurückgeben | Neuer Spieler: Daten prüfen in einem Dictonary zurückgeben | def game_NewPlayerData(self, data):
''' Neuer Spieler: Daten prüfen in einem Dictonary zurückgeben '''
thisActionName = "game_NewPlayer"
dictPlayerData = {}
playerStake = 0
# Spieler will alles einsetzen
if (data.GetParam(1).lower() == "max") or (data.GetParam(1).lower() == "all") or (data.GetParam(1).lower() == "allin"):
# Übernehme die aktuellen Punkte aus der Datenbank
playerStake = int(self.Parent.GetPoints(data.User))
else:
# Übernehme die Zahl aus den Parametern
try:
playerStake = int(data.GetParam(
1).replace(".", "").replace(",", ""))
# Übergebener Parameter ist keine Zahl
except:
# Abbruch und leeres Dictionary an aufrufende Funktion
return dictPlayerData
# Einsatz ist grösser als Maximum
if playerStake > int(self.Settings.Game_Settings_MaxStake):
playerStake = int(self.Settings.Game_Settings_MaxStake)
maximumStakeReached = True
else:
maximumStakeReached = False
# Einsatz ist kleiner oder gleich dem eingestellten Minimum
if playerStake < self.Settings.Game_Settings_MinStake:
belowStake = "belowMinStake"
else:
belowStake = True
# Spieler hat nicht genügend Punkte um am Einsatz teilzunehmen
if playerStake > int(self.Parent.GetPoints(data.User)):
notEnoughPoints = True
else:
notEnoughPoints = False
# Daten in Dictionary übertragen
dictPlayerData = {
"playerName": data.User,
"playerDisplayName": data.UserName,
"playerStake": playerStake,
"belowStake": belowStake,
"maximumStake": maximumStakeReached,
"currentPlayerPoints": int(self.Parent.GetPoints(data.User)),
"notEnoughPoints": notEnoughPoints
}
return dictPlayerData | [
"def",
"game_NewPlayerData",
"(",
"self",
",",
"data",
")",
":",
"thisActionName",
"=",
"\"game_NewPlayer\"",
"dictPlayerData",
"=",
"{",
"}",
"playerStake",
"=",
"0",
"# Spieler will alles einsetzen\r",
"if",
"(",
"data",
".",
"GetParam",
"(",
"1",
")",
".",
"lower",
"(",
")",
"==",
"\"max\"",
")",
"or",
"(",
"data",
".",
"GetParam",
"(",
"1",
")",
".",
"lower",
"(",
")",
"==",
"\"all\"",
")",
"or",
"(",
"data",
".",
"GetParam",
"(",
"1",
")",
".",
"lower",
"(",
")",
"==",
"\"allin\"",
")",
":",
"# Übernehme die aktuellen Punkte aus der Datenbank\r",
"playerStake",
"=",
"int",
"(",
"self",
".",
"Parent",
".",
"GetPoints",
"(",
"data",
".",
"User",
")",
")",
"else",
":",
"# Übernehme die Zahl aus den Parametern\r",
"try",
":",
"playerStake",
"=",
"int",
"(",
"data",
".",
"GetParam",
"(",
"1",
")",
".",
"replace",
"(",
"\".\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\",\"",
",",
"\"\"",
")",
")",
"# Übergebener Parameter ist keine Zahl\r",
"except",
":",
"# Abbruch und leeres Dictionary an aufrufende Funktion\r",
"return",
"dictPlayerData",
"# Einsatz ist grösser als Maximum\r",
"if",
"playerStake",
">",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Settings_MaxStake",
")",
":",
"playerStake",
"=",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Settings_MaxStake",
")",
"maximumStakeReached",
"=",
"True",
"else",
":",
"maximumStakeReached",
"=",
"False",
"# Einsatz ist kleiner oder gleich dem eingestellten Minimum\r",
"if",
"playerStake",
"<",
"self",
".",
"Settings",
".",
"Game_Settings_MinStake",
":",
"belowStake",
"=",
"\"belowMinStake\"",
"else",
":",
"belowStake",
"=",
"True",
"# Spieler hat nicht genügend Punkte um am Einsatz teilzunehmen\r",
"if",
"playerStake",
">",
"int",
"(",
"self",
".",
"Parent",
".",
"GetPoints",
"(",
"data",
".",
"User",
")",
")",
":",
"notEnoughPoints",
"=",
"True",
"else",
":",
"notEnoughPoints",
"=",
"False",
"# Daten in Dictionary übertragen\r",
"dictPlayerData",
"=",
"{",
"\"playerName\"",
":",
"data",
".",
"User",
",",
"\"playerDisplayName\"",
":",
"data",
".",
"UserName",
",",
"\"playerStake\"",
":",
"playerStake",
",",
"\"belowStake\"",
":",
"belowStake",
",",
"\"maximumStake\"",
":",
"maximumStakeReached",
",",
"\"currentPlayerPoints\"",
":",
"int",
"(",
"self",
".",
"Parent",
".",
"GetPoints",
"(",
"data",
".",
"User",
")",
")",
",",
"\"notEnoughPoints\"",
":",
"notEnoughPoints",
"}",
"return",
"dictPlayerData"
] | [
1404,
4
] | [
1463,
29
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
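`game_NewPlayerData` turns a chat parameter into a stake: `max`/`all`/`allin` means the player's full points, any other value is parsed as an integer with `.`/`,` separators removed, and the result is clamped against the configured maximum and checked against the minimum and the player's balance. A self-contained sketch of that parsing and validation (the bot's `Parent` and `Settings` objects are assumed and replaced by plain arguments):

```python
def parse_stake(param: str, player_points: int, min_stake: int, max_stake: int) -> dict:
    """Parse and validate a stake parameter the way the heist handler does."""
    if param.lower() in ("max", "all", "allin"):
        stake = player_points
    else:
        try:
            stake = int(param.replace(".", "").replace(",", ""))
        except ValueError:
            return {}                      # not a number: empty dict, caller aborts
    maximum_reached = stake > max_stake
    if maximum_reached:
        stake = max_stake                  # clamp to the configured maximum
    return {
        "playerStake": stake,
        "belowStake": "belowMinStake" if stake < min_stake else True,
        "maximumStake": maximum_reached,
        "notEnoughPoints": stake > player_points,
    }

print(parse_stake("1.500", player_points=1200, min_stake=100, max_stake=1000))
```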
text | (values, data) | return data.format(values["pattern"], values), True | Gibt den Text unter pattern aus.
Gibt den Text unter pattern aus. Wenn dieser Ersetzungen erwartet, werden diese durchgeführt.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Gibt den Text unter pattern aus. | def text(values, data):
"""Gibt den Text unter pattern aus.
Gibt den Text unter pattern aus. Wenn dieser Ersetzungen erwartet, werden diese durchgeführt.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
return data.format(values["pattern"], values), True | [
"def",
"text",
"(",
"values",
",",
"data",
")",
":",
"return",
"data",
".",
"format",
"(",
"values",
"[",
"\"pattern\"",
"]",
",",
"values",
")",
",",
"True"
] | [
46,
0
] | [
54,
55
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
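`text` simply runs the configured `pattern` through the project's formatter so that placeholders are filled from the API data. With Python's built-in `str.format_map` standing in for `StepData.format` (an assumption; the real formatter supports richer syntax), the idea is:

```python
def render_text(pattern: str, api_data: dict) -> str:
    """Stand-in for the text step: substitute placeholders from API data."""
    return pattern.format_map(api_data)

print(render_text("Heute werden es {temp_max} Grad.", {"temp_max": 23}))
```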
GUI.festlegen_Styles | (self) | Festlegen der genutzten Styles | Festlegen der genutzten Styles | def festlegen_Styles(self):
""" Festlegen der genutzten Styles """
sblock = ttk.Style()
sblock.configure('TLabel', font=("Tahoma", 11))
sblock.configure('TButton', font=("Tahoma", 11 ))
sblock.configure('TCheckbutton', font=("Tahoma", 11))
sblock.configure( "Haupt.TFrame", background = Konfig.HAUPT_BACKGROUND)
sblock.configure( "HauptLabel2.TLabel", background =Konfig.HAUPT_BACKGROUND, font=("Tahoma", 11, "bold"))
sblock.configure( "HauptLabel1.TLabel", background =Konfig.HAUPT_BACKGROUND, font=("Tahoma", 24, "bold"))
sblock.configure( "Block.TFrame", background = Konfig.BLOCK_BACKGROUND, relief=RAISED)
sblock.configure( "BlockLabel2.TLabel", background =Konfig.BLOCK_BACKGROUND, font=("Tahoma", 11, "bold"))
sblock.configure( "BlockLabel.TLabel", background =Konfig.BLOCK_BACKGROUND)
sblock.configure( "BlockStatusLabel.TLabel", background =Konfig.BLOCK_BACKGROUND, font=("Tahoma", 11, "bold"))
sblock.configure( "BlockCheckbutton.TCheckbutton", background = Konfig.BLOCK_BACKGROUND)
sblock.configure( "Unterblock.TFrame", background = Konfig.BLOCK_BACKGROUND) | [
"def",
"festlegen_Styles",
"(",
"self",
")",
":",
"sblock",
"=",
"ttk",
".",
"Style",
"(",
")",
"sblock",
".",
"configure",
"(",
"'TLabel'",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
")",
")",
"sblock",
".",
"configure",
"(",
"'TButton'",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
")",
")",
"sblock",
".",
"configure",
"(",
"'TCheckbutton'",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
")",
")",
"sblock",
".",
"configure",
"(",
"\"Haupt.TFrame\"",
",",
"background",
"=",
"Konfig",
".",
"HAUPT_BACKGROUND",
")",
"sblock",
".",
"configure",
"(",
"\"HauptLabel2.TLabel\"",
",",
"background",
"=",
"Konfig",
".",
"HAUPT_BACKGROUND",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
",",
"\"bold\"",
")",
")",
"sblock",
".",
"configure",
"(",
"\"HauptLabel1.TLabel\"",
",",
"background",
"=",
"Konfig",
".",
"HAUPT_BACKGROUND",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"24",
",",
"\"bold\"",
")",
")",
"sblock",
".",
"configure",
"(",
"\"Block.TFrame\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
",",
"relief",
"=",
"RAISED",
")",
"sblock",
".",
"configure",
"(",
"\"BlockLabel2.TLabel\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
",",
"\"bold\"",
")",
")",
"sblock",
".",
"configure",
"(",
"\"BlockLabel.TLabel\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
")",
"sblock",
".",
"configure",
"(",
"\"BlockStatusLabel.TLabel\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
",",
"font",
"=",
"(",
"\"Tahoma\"",
",",
"11",
",",
"\"bold\"",
")",
")",
"sblock",
".",
"configure",
"(",
"\"BlockCheckbutton.TCheckbutton\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
")",
"sblock",
".",
"configure",
"(",
"\"Unterblock.TFrame\"",
",",
"background",
"=",
"Konfig",
".",
"BLOCK_BACKGROUND",
")"
] | [
124,
4
] | [
141,
84
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispConfig.mqttInitLogger | ( self, level:int=None, cleanup:bool=False ) | Logging über MQTT einschalten.
Parameters
----------
level : int, optional
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: NOTSET
cleanup : bool, optional
MQTT Cleanup vor dem initialisieren durchführen. Default = False
Returns
-------
None.
| Logging über MQTT einschalten. | def mqttInitLogger( self, level:int=None, cleanup:bool=False ):
"""Logging über MQTT einschalten.
Parameters
----------
level : int, optional
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: NOTSET
cleanup : bool, optional
MQTT Cleanup vor dem initialisieren durchführen. Default = False
Returns
-------
None.
"""
# zuerst root logger
self.logger_name = "root"
# wenn gewünscht handler neu aufsetzen
if cleanup:
self.mqttCleanup()
if self._config.server.mqtt:
# MQTT Logger seltezn
logger = logging.getLogger( "MQTT" )
# Handler auf MQTT
mqtthdlr = self.mqttGetHandler( )
if not mqtthdlr:
#
# wenn hier was geändert wird muss der kernel neu gestartet bzw. mqttCleanup aufgerufen werden
#
mqtt_init_ready = threading.Event()
self._thread_mqtthdlr = None
def signalStartup( msg ):
#print( "MQTT signalStartup", msg)
#print( time.strftime("%Y%m%d %H:%M:%S", time.localtime(time.time()) ) )
mqtt_init_ready.set()
def startMQTTclass():
"""MQTTclass über threading starten und auf signalStartup warten.
Returns
-------
None.
"""
self._thread_mqtthdlr = MQTTclass( self._config.server.mqtt.toDict() )
# auf eine signalisierung
self._thread_mqtthdlr.signalStartup.connect( signalStartup )
# Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen
thread = threading.Thread( target=startMQTTclass )
thread.start()
# max 2 sekunden oder auf mqtt_init_ready signalStartup warten
while not mqtt_init_ready.wait( timeout=2 ):
mqtt_init_ready.set()
# wenn der mqtt handler initialisiert wurde logging und _mqtthdlr setzen
if self._thread_mqtthdlr and self._thread_mqtthdlr._mqttc:
_mqtthdlr = self._thread_mqtthdlr
# logging Handler mit der MQTTclass Klasse initialisieren
logging.Handler.__init__( _mqtthdlr )
logger.addHandler( _mqtthdlr )
# einen Verweis auf _mqtthdlr sowie send bereitstellen
logger._mqtthdlr = _mqtthdlr
logger.send = _mqtthdlr.send
# progress bereitstellen
logger.progressStart = _mqtthdlr.progress_start
logger.progress = _mqtthdlr.progress_run
logger.progressReady = _mqtthdlr.progress_ready
# wenn alles fertig ist _mqtthdlr in self merken
self._mqtthdlr = _mqtthdlr
# logger name merken
self.logger_name = logger.name
else:
# logger ist vorhanden verweis wieder in _mqtthdlr ablegen
self._mqtthdlr = mqtthdlr
# logger name merken
self.logger_name = logger.name
# level wenn angegeben neu setzen
if level:
logger.setLevel( level ) | [
"def",
"mqttInitLogger",
"(",
"self",
",",
"level",
":",
"int",
"=",
"None",
",",
"cleanup",
":",
"bool",
"=",
"False",
")",
":",
"# zuerst root logger",
"self",
".",
"logger_name",
"=",
"\"root\"",
"# wenn gewünscht handler neu aufsetzen",
"if",
"cleanup",
":",
"self",
".",
"mqttCleanup",
"(",
")",
"if",
"self",
".",
"_config",
".",
"server",
".",
"mqtt",
":",
"# MQTT Logger seltezn",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"\"MQTT\"",
")",
"# Handler auf MQTT",
"mqtthdlr",
"=",
"self",
".",
"mqttGetHandler",
"(",
")",
"if",
"not",
"mqtthdlr",
":",
"#",
"# wenn hier was geändert wird muss der kernel neu gestartet bzw. mqttCleanup aufgerufen werden",
"#",
"mqtt_init_ready",
"=",
"threading",
".",
"Event",
"(",
")",
"self",
".",
"_thread_mqtthdlr",
"=",
"None",
"def",
"signalStartup",
"(",
"msg",
")",
":",
"#print( \"MQTT signalStartup\", msg)",
"#print( time.strftime(\"%Y%m%d %H:%M:%S\", time.localtime(time.time()) ) )",
"mqtt_init_ready",
".",
"set",
"(",
")",
"def",
"startMQTTclass",
"(",
")",
":",
"\"\"\"MQTTclass über threading starten und auf signalStartup warten.\n\n Returns\n -------\n None.\n\n \"\"\"",
"self",
".",
"_thread_mqtthdlr",
"=",
"MQTTclass",
"(",
"self",
".",
"_config",
".",
"server",
".",
"mqtt",
".",
"toDict",
"(",
")",
")",
"# auf eine signalisierung",
"self",
".",
"_thread_mqtthdlr",
".",
"signalStartup",
".",
"connect",
"(",
"signalStartup",
")",
"# Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"startMQTTclass",
")",
"thread",
".",
"start",
"(",
")",
"# max 2 sekunden oder auf mqtt_init_ready signalStartup warten",
"while",
"not",
"mqtt_init_ready",
".",
"wait",
"(",
"timeout",
"=",
"2",
")",
":",
"mqtt_init_ready",
".",
"set",
"(",
")",
"# wenn der mqtt handler initialisiert wurde logging und _mqtthdlr setzen",
"if",
"self",
".",
"_thread_mqtthdlr",
"and",
"self",
".",
"_thread_mqtthdlr",
".",
"_mqttc",
":",
"_mqtthdlr",
"=",
"self",
".",
"_thread_mqtthdlr",
"# logging Handler mit der MQTTclass Klasse initialisieren",
"logging",
".",
"Handler",
".",
"__init__",
"(",
"_mqtthdlr",
")",
"logger",
".",
"addHandler",
"(",
"_mqtthdlr",
")",
"# einen Verweis auf _mqtthdlr sowie send bereitstellen",
"logger",
".",
"_mqtthdlr",
"=",
"_mqtthdlr",
"logger",
".",
"send",
"=",
"_mqtthdlr",
".",
"send",
"# progress bereitstellen",
"logger",
".",
"progressStart",
"=",
"_mqtthdlr",
".",
"progress_start",
"logger",
".",
"progress",
"=",
"_mqtthdlr",
".",
"progress_run",
"logger",
".",
"progressReady",
"=",
"_mqtthdlr",
".",
"progress_ready",
"# wenn alles fertig ist _mqtthdlr in self merken",
"self",
".",
"_mqtthdlr",
"=",
"_mqtthdlr",
"# logger name merken",
"self",
".",
"logger_name",
"=",
"logger",
".",
"name",
"else",
":",
"# logger ist vorhanden verweis wieder in _mqtthdlr ablegen",
"self",
".",
"_mqtthdlr",
"=",
"mqtthdlr",
"# logger name merken",
"self",
".",
"logger_name",
"=",
"logger",
".",
"name",
"# level wenn angegeben neu setzen",
"if",
"level",
":",
"logger",
".",
"setLevel",
"(",
"level",
")"
] | [
638,
4
] | [
732,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
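`mqttInitLogger` attaches an MQTT-backed `logging.Handler` to the `"MQTT"` logger: it starts the handler in a thread, waits on a `threading.Event` that is set once the broker connection is up, and only then wires the handler and its convenience methods onto the logger. The same startup pattern, with the MQTT transport replaced by a print-only handler (purely illustrative, not the project's `MQTTclass`):

```python
import logging
import threading

class FakeMqttHandler(logging.Handler):
    """Illustrative handler: the real MQTTclass publishes to a broker instead of printing."""
    def emit(self, record: logging.LogRecord) -> None:
        print(f"publish logging/{record.levelname.lower()}: {record.getMessage()}")

ready = threading.Event()
handler_box = {}

def start_handler() -> None:
    handler_box["handler"] = FakeMqttHandler()   # real code constructs MQTTclass(config) here
    ready.set()                                  # real code sets this from the broker's startup signal

thread = threading.Thread(target=start_handler)
thread.start()
ready.wait(timeout=2)        # mqttInitLogger waits the same way before wiring the logger
thread.join()

logger = logging.getLogger("MQTT")
logger.addHandler(handler_box["handler"])
logger.setLevel(logging.INFO)
logger.info("logger is now routed through the handler")
```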
|
append | (values: dict, data: StepData) | Speichert den Wert unter `"key"` in einem Array.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Speichert den Wert unter `"key"` in einem Array. | def append(values: dict, data: StepData):
"""Speichert den Wert unter `"key"` in einem Array.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
if values.get("new_key_type", "multiple") != "single":
new_key = values["new_keys"][idx]
else:
new_key = values["new_keys"][0]
new_key_format = data.format(values.get("append_type", "list"))
try:
result = data.get_data(new_key, values)
except StepKeyError:
if new_key_format == "string":
data.insert_data(new_key, "", values)
else:
data.insert_data(new_key, [], values)
result = data.get_data(new_key, values)
if new_key_format == "string":
result = result + data.format(values.get("delimiter", " ")) + value
data.insert_data(new_key, result, values)
else:
result.append(value) | [
"def",
"append",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"if",
"values",
".",
"get",
"(",
"\"new_key_type\"",
",",
"\"multiple\"",
")",
"!=",
"\"single\"",
":",
"new_key",
"=",
"values",
"[",
"\"new_keys\"",
"]",
"[",
"idx",
"]",
"else",
":",
"new_key",
"=",
"values",
"[",
"\"new_keys\"",
"]",
"[",
"0",
"]",
"new_key_format",
"=",
"data",
".",
"format",
"(",
"values",
".",
"get",
"(",
"\"append_type\"",
",",
"\"list\"",
")",
")",
"try",
":",
"result",
"=",
"data",
".",
"get_data",
"(",
"new_key",
",",
"values",
")",
"except",
"StepKeyError",
":",
"if",
"new_key_format",
"==",
"\"string\"",
":",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"\"\"",
",",
"values",
")",
"else",
":",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"[",
"]",
",",
"values",
")",
"result",
"=",
"data",
".",
"get_data",
"(",
"new_key",
",",
"values",
")",
"if",
"new_key_format",
"==",
"\"string\"",
":",
"result",
"=",
"result",
"+",
"data",
".",
"format",
"(",
"values",
".",
"get",
"(",
"\"delimiter\"",
",",
"\" \"",
")",
")",
"+",
"value",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"result",
",",
"values",
")",
"else",
":",
"result",
".",
"append",
"(",
"value",
")"
] | [
141,
0
] | [
169,
32
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
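`append` collects looped values under a target key, either into a list or, when `append_type` is `"string"`, by concatenating them with a delimiter, creating the target on first use. The accumulate-or-create logic in plain Python (a dict stands in for the `StepData` store):

```python
def append_value(store: dict, new_key: str, value, append_type: str = "list", delimiter: str = " ") -> None:
    """Create the target on first use, then append or concatenate, as the step does."""
    if new_key not in store:
        store[new_key] = "" if append_type == "string" else []
    if append_type == "string":
        store[new_key] = store[new_key] + delimiter + value   # note: starts with the delimiter, like the original
    else:
        store[new_key].append(value)

store = {}
for city in ["Berlin", "Hamburg"]:
    append_value(store, "cities", city)
    append_value(store, "headline", city, append_type="string", delimiter=", ")
print(store)  # {'cities': ['Berlin', 'Hamburg'], 'headline': ', Berlin, Hamburg'}
```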
|
transmissionsToPlot | ( **args ) | Hilfsfunktion um Linien im Plot bei den Messorten zu plotten
| Hilfsfunktion um Linien im Plot bei den Messorten zu plotten | def transmissionsToPlot( **args ):
""" Hilfsfunktion um Linien im Plot bei den Messorten zu plotten
"""
self = args["self"]
ax = args["ax"]
transmissions = args["transmissions"]
style = dict(linestyle="-", linewidth=1, color="green")
# plot transmission positions
for idx in transmissions:
if self.infos["collimator"] == 0 or self.infos["collimator"] == 180:
ax.axvline( transmissions[idx]["pxPosition"], **style )
elif self.infos["collimator"] == 90:
pxPosition = self.mm2dots_X(transmissions[idx]["position"] * -1 )
ax.axhline( pxPosition, **style )
else:
pxPosition = self.mm2dots_X(transmissions[idx]["position"] )
ax.axhline( transmissions[idx]["pxPosition"] , **style ) | [
"def",
"transmissionsToPlot",
"(",
"*",
"*",
"args",
")",
":",
"self",
"=",
"args",
"[",
"\"self\"",
"]",
"ax",
"=",
"args",
"[",
"\"ax\"",
"]",
"transmissions",
"=",
"args",
"[",
"\"transmissions\"",
"]",
"style",
"=",
"dict",
"(",
"linestyle",
"=",
"\"-\"",
",",
"linewidth",
"=",
"1",
",",
"color",
"=",
"\"green\"",
")",
"# plot transmission positions",
"for",
"idx",
"in",
"transmissions",
":",
"if",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"0",
"or",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"180",
":",
"ax",
".",
"axvline",
"(",
"transmissions",
"[",
"idx",
"]",
"[",
"\"pxPosition\"",
"]",
",",
"*",
"*",
"style",
")",
"elif",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"90",
":",
"pxPosition",
"=",
"self",
".",
"mm2dots_X",
"(",
"transmissions",
"[",
"idx",
"]",
"[",
"\"position\"",
"]",
"*",
"-",
"1",
")",
"ax",
".",
"axhline",
"(",
"pxPosition",
",",
"*",
"*",
"style",
")",
"else",
":",
"pxPosition",
"=",
"self",
".",
"mm2dots_X",
"(",
"transmissions",
"[",
"idx",
"]",
"[",
"\"position\"",
"]",
")",
"ax",
".",
"axhline",
"(",
"transmissions",
"[",
"idx",
"]",
"[",
"\"pxPosition\"",
"]",
",",
"*",
"*",
"style",
")"
] | [
693,
0
] | [
712,
68
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispConfig.merge | (self, name:str=None, config:dict={}) | return self | Führt ein update in einem angegebenen config Zweig aus.
Gibt es name nicht wird er angelegt
Parameters
----------
name : str
Bezeichner dessen Inhalt ausgelesen wird . operator für die tiefe
config : dict
In den config Zweig zu mischendes dict.
Returns
-------
self
| Führt ein update in einem angegebenen config Zweig aus. | def merge(self, name:str=None, config:dict={}):
"""Führt ein update in einem angegebenen config Zweig aus.
Gibt es name nicht wird er angelegt
Parameters
----------
name : str
Bezeichner dessen Inhalt ausgelesen wird . operator für die tiefe
config : dict
In den config Zweig zu mischendes dict.
Returns
-------
self
"""
branch = self.get(name, {} )
self.set( name, dict_merge(branch, DotMap( config ) ) )
return self | [
"def",
"merge",
"(",
"self",
",",
"name",
":",
"str",
"=",
"None",
",",
"config",
":",
"dict",
"=",
"{",
"}",
")",
":",
"branch",
"=",
"self",
".",
"get",
"(",
"name",
",",
"{",
"}",
")",
"self",
".",
"set",
"(",
"name",
",",
"dict_merge",
"(",
"branch",
",",
"DotMap",
"(",
"config",
")",
")",
")",
"return",
"self"
] | [
245,
4
] | [
266,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
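`merge` reads a config branch, merges the given dict into it with `dict_merge`, and writes the branch back. The unit test earlier in this table pins down `dict_merge`'s behaviour: with the default it adds new keys, with the third argument `False` it only touches keys already present. A plain-dict sketch consistent with those expectations (the real implementation and the `DotMap` wrapper are assumed):

```python
def dict_merge(base: dict, update: dict, add_new_keys: bool = True) -> dict:
    """Recursively merge `update` into a copy of `base`, optionally ignoring unknown keys."""
    merged = dict(base)
    for key, value in update.items():
        if key not in base and not add_new_keys:
            continue
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            merged[key] = dict_merge(base[key], value, add_new_keys)
        else:
            merged[key] = value
    return merged

print(dict_merge({"A": 1}, {"B": 2}))         # {'A': 1, 'B': 2}
print(dict_merge({"A": 1}, {"B": 2}, False))  # {'A': 1}
```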
Arc.minmax_from_center | (self, center) | return (dist_min, dist_max) | Die Funktion ermittelt den minimalen und maximalen
Abstand vom Center
| Die Funktion ermittelt den minimalen und maximalen
Abstand vom Center
| def minmax_from_center(self, center):
""" Die Funktion ermittelt den minimalen und maximalen
Abstand vom Center
"""
d = distance(center, self.center)
if np.isclose(d, 0.0):
return (self.radius, self.radius)
angle = alpha_line(center, self.center)
dist_min = abs(d - self.radius)
dist_max = d + self.radius
pmax = point(center, d + self.radius, angle)
alpha_pmax = alpha_line(self.center, pmax)
if not self.is_angle_inside(alpha_pmax, 1e-08):
dist_max = max(distance(center, self.p1),
distance(center, self.p2))
pmin = point(center, d - self.radius, angle)
alpha_pmin = alpha_line(self.center, pmin)
if not self.is_angle_inside(alpha_pmin, 1e-08):
dist_min = min(distance(center, self.p1),
distance(center, self.p2))
return (dist_min, dist_max) | [
"def",
"minmax_from_center",
"(",
"self",
",",
"center",
")",
":",
"d",
"=",
"distance",
"(",
"center",
",",
"self",
".",
"center",
")",
"if",
"np",
".",
"isclose",
"(",
"d",
",",
"0.0",
")",
":",
"return",
"(",
"self",
".",
"radius",
",",
"self",
".",
"radius",
")",
"angle",
"=",
"alpha_line",
"(",
"center",
",",
"self",
".",
"center",
")",
"dist_min",
"=",
"abs",
"(",
"d",
"-",
"self",
".",
"radius",
")",
"dist_max",
"=",
"d",
"+",
"self",
".",
"radius",
"pmax",
"=",
"point",
"(",
"center",
",",
"d",
"+",
"self",
".",
"radius",
",",
"angle",
")",
"alpha_pmax",
"=",
"alpha_line",
"(",
"self",
".",
"center",
",",
"pmax",
")",
"if",
"not",
"self",
".",
"is_angle_inside",
"(",
"alpha_pmax",
",",
"1e-08",
")",
":",
"dist_max",
"=",
"max",
"(",
"distance",
"(",
"center",
",",
"self",
".",
"p1",
")",
",",
"distance",
"(",
"center",
",",
"self",
".",
"p2",
")",
")",
"pmin",
"=",
"point",
"(",
"center",
",",
"d",
"-",
"self",
".",
"radius",
",",
"angle",
")",
"alpha_pmin",
"=",
"alpha_line",
"(",
"self",
".",
"center",
",",
"pmin",
")",
"if",
"not",
"self",
".",
"is_angle_inside",
"(",
"alpha_pmin",
",",
"1e-08",
")",
":",
"dist_min",
"=",
"min",
"(",
"distance",
"(",
"center",
",",
"self",
".",
"p1",
")",
",",
"distance",
"(",
"center",
",",
"self",
".",
"p2",
")",
")",
"return",
"(",
"dist_min",
",",
"dist_max",
")"
] | [
856,
4
] | [
881,
35
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
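`minmax_from_center` first treats the arc as a full circle, where a point at distance `d` from the centre has extreme distances `|d - r|` and `d + r`, and then falls back to the arc's end points whenever the candidate extreme does not lie on the arc. The full-circle part is easy to check in isolation:

```python
import math

def circle_minmax(center, point, radius):
    """Min/max distance from `point` to a full circle around `center`."""
    d = math.dist(center, point)
    return abs(d - radius), d + radius

# Point 5 units away from the centre of a circle with radius 2:
print(circle_minmax((0.0, 0.0), (5.0, 0.0), 2.0))  # (3.0, 7.0)
```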
HeistSystem.DB_create_DefaultTargets | (self) | return | Erzeuge verschiedene Target-Einträge in der Datenbank | Erzeuge verschiedene Target-Einträge in der Datenbank | def DB_create_DefaultTargets(self):
''' Erzeuge verschiedene Target-Einträge in der Datenbank '''
thisActionName = "DB_create_DefaultTargets"
# Bestehende Tabelle löschen
self.DB_dropTable_Targets()
# Tabelle neu erzeugen
self.DB_create_Tables()
# Basis SQL Kommando um neue Daten in die Tabelle einzutragen
sql = "INSERT INTO game_heist_targets ( targetName ) VALUES ( ? )"
targets = ["Ares Macrotechnology",
"Aztechnology",
"Mitsuhama Computer Technologies",
"Renraku Computer Systems",
"Saeder-Krupp",
"Yamatetsu Corporation"]
# Für jeden Eintrag der Liste einen Datensatz in der Datenbank erzeugen
for target in targets:
# Variable für das SQL-Basis Kommando vorbereiten
val_target = (target, )
# In die Datenbank schreiben - Doppelte Werte werden ignoriert
try:
self.GameDB.execute(sql, val_target)
except:
pass
# Daten speichern
self.GameDB.commit()
return | [
"def",
"DB_create_DefaultTargets",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"DB_create_DefaultTargets\"",
"# Bestehende Tabelle löschen\r",
"self",
".",
"DB_dropTable_Targets",
"(",
")",
"# Tabelle neu erzeugen\r",
"self",
".",
"DB_create_Tables",
"(",
")",
"# Basis SQL Kommando um neue Daten in die Tabelle einzutragen\r",
"sql",
"=",
"\"INSERT INTO game_heist_targets ( targetName ) VALUES ( ? )\"",
"targets",
"=",
"[",
"\"Ares Macrotechnology\"",
",",
"\"Aztechnology\"",
",",
"\"Mitsuhama Computer Technologies\"",
",",
"\"Renraku Computer Systems\"",
",",
"\"Saeder-Krupp\"",
",",
"\"Yamatetsu Corporation\"",
"]",
"# Für jeden Eintrag der Liste einen Datensatz in der Datenbank erzeugen\r",
"for",
"target",
"in",
"targets",
":",
"# Variable für das SQL-Basis Kommando vorbereiten\r",
"val_target",
"=",
"(",
"target",
",",
")",
"# In die Datenbank schreiben - Doppelte Werte werden ignoriert\r",
"try",
":",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql",
",",
"val_target",
")",
"except",
":",
"pass",
"# Daten speichern\r",
"self",
".",
"GameDB",
".",
"commit",
"(",
")",
"return"
] | [
722,
4
] | [
758,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ispConfig.render_template | ( self, tpl:str="", variables:dict=None, deep_replace:bool=False ) | return tpl | Ersetzt in tmp alle variablen aus variables.
Wird variables nicht angegeben wird _config["variables"] verwendet
Parameters
----------
tpl : str, optional
Jinja template string. The default is "".
variables : dict, optional
Zu ersetzende variables Angaben. The default is _config["variables"].
deep_replace: bool, optional
Führt render zweimal aus um in variables liegende Anweisungen auch zu ersetzen. The default is False
Returns
-------
None.
| Ersetzt in tmp alle variablen aus variables. | def render_template( self, tpl:str="", variables:dict=None, deep_replace:bool=False ):
"""Ersetzt in tmp alle variablen aus variables.
Wird variables nicht angegeben wird _config["variables"] verwendet
Parameters
----------
tpl : str, optional
Jinja template string. The default is "".
variables : dict, optional
Zu ersetzende variables Angaben. The default is _config["variables"].
deep_replace: bool, optional
Führt render zweimal aus um in variables liegende Anweisungen auch zu ersetzen. The default is False
Returns
-------
None.
"""
if not variables:
variables = self._config["variables"]
# immer now mit der aktuellen Zeit mitgeben
variables["now"] = datetime.now()
# je nach deep_replace einfacher Durchlauf oder mehrere
n = range(1)
if deep_replace:
n = range(3)
for i in n:
_tpl = self._env.from_string( tpl )
try:
tpl = _tpl.render( **variables )
except Exception as e: # pragma: no cover
print("CONFIG: config.render_template error bei _tpl.render", e)
return tpl | [
"def",
"render_template",
"(",
"self",
",",
"tpl",
":",
"str",
"=",
"\"\"",
",",
"variables",
":",
"dict",
"=",
"None",
",",
"deep_replace",
":",
"bool",
"=",
"False",
")",
":",
"if",
"not",
"variables",
":",
"variables",
"=",
"self",
".",
"_config",
"[",
"\"variables\"",
"]",
"# immer now mit der aktuellen Zeit mitgeben",
"variables",
"[",
"\"now\"",
"]",
"=",
"datetime",
".",
"now",
"(",
")",
"# je nach deep_replace einfacher Durchlauf oder mehrere",
"n",
"=",
"range",
"(",
"1",
")",
"if",
"deep_replace",
":",
"n",
"=",
"range",
"(",
"3",
")",
"for",
"i",
"in",
"n",
":",
"_tpl",
"=",
"self",
".",
"_env",
".",
"from_string",
"(",
"tpl",
")",
"try",
":",
"tpl",
"=",
"_tpl",
".",
"render",
"(",
"*",
"*",
"variables",
")",
"except",
"Exception",
"as",
"e",
":",
"# pragma: no cover",
"print",
"(",
"\"CONFIG: config.render_template error bei _tpl.render\"",
",",
"e",
")",
"return",
"tpl"
] | [
599,
4
] | [
633,
18
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
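The template rendering above boils down to repeated jinja2 rendering against a variables dict; a stand-alone sketch with plain jinja2 (the variable names and the nested placeholder are made up, ispConfig itself is not used):

    from datetime import datetime
    from jinja2 import Environment

    env = Environment()
    variables = {"name": "Demo", "title": "Report {{ name }}"}  # value contains a placeholder itself
    variables["now"] = datetime.now()

    tpl = "{{ title }} generated at {{ now }}"
    for _ in range(3):  # "deep replace": render repeatedly so nested placeholders resolve too
        tpl = env.from_string(tpl).render(**variables)
    print(tpl)          # e.g. "Report Demo generated at 2024-01-01 12:00:00"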
ispDicom.IMAGE | ( self, query:dict={} ) | return self.query( ds ) | Führt eine suche nach IMAGE durch.
Wie query mit einem default Dataset
Parameters
----------
query : dict, optional
query parameter für ds. The default is {}.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
| Führt eine suche nach IMAGE durch. | def IMAGE( self, query:dict={} ):
"""Führt eine suche nach IMAGE durch.
Wie query mit einem default Dataset
Parameters
----------
query : dict, optional
query parameter für ds. The default is {}.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
"""
ds_model = dicomQueryDefaults["PATIENT"].copy()
ds_model.update( dicomQueryDefaults["STUDY"] )
ds_model.update( dicomQueryDefaults["SERIES"] )
ds_model.update( dicomQueryDefaults["IMAGE"] )
ds_model.update( query )
ds = Dataset()
for name, value in ds_model.items():
ds.__setattr__(name, value)
# Abfrage durchführen
return self.query( ds ) | [
"def",
"IMAGE",
"(",
"self",
",",
"query",
":",
"dict",
"=",
"{",
"}",
")",
":",
"ds_model",
"=",
"dicomQueryDefaults",
"[",
"\"PATIENT\"",
"]",
".",
"copy",
"(",
")",
"ds_model",
".",
"update",
"(",
"dicomQueryDefaults",
"[",
"\"STUDY\"",
"]",
")",
"ds_model",
".",
"update",
"(",
"dicomQueryDefaults",
"[",
"\"SERIES\"",
"]",
")",
"ds_model",
".",
"update",
"(",
"dicomQueryDefaults",
"[",
"\"IMAGE\"",
"]",
")",
"ds_model",
".",
"update",
"(",
"query",
")",
"ds",
"=",
"Dataset",
"(",
")",
"for",
"name",
",",
"value",
"in",
"ds_model",
".",
"items",
"(",
")",
":",
"ds",
".",
"__setattr__",
"(",
"name",
",",
"value",
")",
"# Abfrage durchführen",
"return",
"self",
".",
"query",
"(",
"ds",
")"
] | [
789,
4
] | [
818,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
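The IMAGE query above mainly builds a pydicom Dataset from layered default dictionaries before delegating to query(); a sketch of that build step with invented defaults (the real dicomQueryDefaults are not reproduced here):

    from pydicom.dataset import Dataset

    defaults = {                      # hypothetical stand-in for the merged level defaults
        "QueryRetrieveLevel": "IMAGE",
        "PatientID": "",
        "StudyInstanceUID": "",
        "SeriesInstanceUID": "",
        "SOPInstanceUID": "",
    }
    query = {"PatientID": "12345"}    # caller-supplied filter

    ds_model = defaults.copy()
    ds_model.update(query)

    ds = Dataset()
    for name, value in ds_model.items():
        setattr(ds, name, value)      # same effect as ds.__setattr__(name, value)
    print(ds)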
ispDicom.query | ( self, ds=None ) | return results, status | Führt eine DICOM Abfrage durch.
Parameters
----------
ds : Dataset
Dataset für die Suche und Rückgabe. The default is None.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
C-FIND related - 0xC300 to 0xC3FF
Zusätzlich:
- 0xC3F1 - keine PatientID
- 0xC0FF - initAE: Verbindung fehlgeschlagen
| Führt eine DICOM Abfrage durch. | def query( self, ds=None ):
"""Führt eine DICOM Abfrage durch.
Parameters
----------
ds : Dataset
Dataset für die Suche und Rückgabe. The default is None.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
C-FIND related - 0xC300 to 0xC3FF
Zusätzlich:
- 0xC3F1 - keine PatientID
- 0xC0FF - initAE: Verbindung fehlgeschlagen
"""
results = []
if not ds: # pragma: no cover
logger.warning("dicomClass.query: kein Dataset")
return results, 0xC3F1
# Verbindung ggf herstellen
if not self.assoc:
status = self.initAE()
# und testen
if not self.assoc: # pragma: no cover
#print("dicomClass.query: Verbindung fehlgeschlagen")
logger.warning("dicomClass.query: Verbindung fehlgeschlagen")
return results, status
logger.warning("dicomClass.query: Abfrage durchführen")
# Abfrage durchführen
responses = self.assoc.send_c_find(
ds,
query_model=PatientRootQueryRetrieveInformationModelFind
)
# Rückgabe auswerten
for (response_status, rds) in responses:
# status code bestimmen
status = 0xC3F3
if response_status:
status = response_status.Status
# je nach status
if status in (0xFF00, 0xFF01) and rds:
# If the status is 'Pending' then `identifier` is the C-FIND response
results.append( rds )
elif status == 0x0000:
# abfrage wurde komplett durchgeführt
# print("identifier:", identifier)
pass
else: # pragma: no cover
#print('dicomClass.query: Connection timed out, was aborted or received invalid response: 0x{0:04x}'.format( status ))
logger.warning('dicomClass.query: Connection timed out, was aborted or received invalid response: 0x{0:04x}'.format( status ) )
return results, status | [
"def",
"query",
"(",
"self",
",",
"ds",
"=",
"None",
")",
":",
"results",
"=",
"[",
"]",
"if",
"not",
"ds",
":",
"# pragma: no cover",
"logger",
".",
"warning",
"(",
"\"dicomClass.query: kein Dataset\"",
")",
"return",
"results",
",",
"0xC3F1",
"# Verbindung ggf herstellen",
"if",
"not",
"self",
".",
"assoc",
":",
"status",
"=",
"self",
".",
"initAE",
"(",
")",
"# und testen",
"if",
"not",
"self",
".",
"assoc",
":",
"# pragma: no cover",
"#print(\"dicomClass.query: Verbindung fehlgeschlagen\")",
"logger",
".",
"warning",
"(",
"\"dicomClass.query: Verbindung fehlgeschlagen\"",
")",
"return",
"results",
",",
"status",
"logger",
".",
"warning",
"(",
"\"dicomClass.query: Abfrage durchführen\")",
"",
"# Abfrage durchführen",
"responses",
"=",
"self",
".",
"assoc",
".",
"send_c_find",
"(",
"ds",
",",
"query_model",
"=",
"PatientRootQueryRetrieveInformationModelFind",
")",
"# Rückgabe auswerten",
"for",
"(",
"response_status",
",",
"rds",
")",
"in",
"responses",
":",
"# status code bestimmen",
"status",
"=",
"0xC3F3",
"if",
"response_status",
":",
"status",
"=",
"response_status",
".",
"Status",
"# je nach status",
"if",
"status",
"in",
"(",
"0xFF00",
",",
"0xFF01",
")",
"and",
"rds",
":",
"# If the status is 'Pending' then `identifier` is the C-FIND response",
"results",
".",
"append",
"(",
"rds",
")",
"elif",
"status",
"==",
"0x0000",
":",
"# abfrage wurde komplett durchgeführt",
"# print(\"identifier:\", identifier)",
"pass",
"else",
":",
"# pragma: no cover",
"#print('dicomClass.query: Connection timed out, was aborted or received invalid response: 0x{0:04x}'.format( status ))",
"logger",
".",
"warning",
"(",
"'dicomClass.query: Connection timed out, was aborted or received invalid response: 0x{0:04x}'",
".",
"format",
"(",
"status",
")",
")",
"return",
"results",
",",
"status"
] | [
635,
4
] | [
700,
30
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
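The response handling above follows the usual C-FIND pattern: keep identifiers while the status is Pending (0xFF00/0xFF01), stop at 0x0000. The loop below isolates that pattern with a stubbed response iterable instead of a real pynetdicom association:

    class _Status:                    # stand-in for the status object yielded by send_c_find
        def __init__(self, code):
            self.Status = code

    responses = [(_Status(0xFF00), {"PatientID": "12345"}),   # pending + identifier
                 (_Status(0x0000), None)]                      # final success, no identifier

    results, status = [], 0xC3F3
    for response_status, identifier in responses:
        status = response_status.Status if response_status else 0xC3F3
        if status in (0xFF00, 0xFF01) and identifier:
            results.append(identifier)        # pending: collect the returned identifier
        elif status == 0x0000:
            pass                              # query finished cleanly
        else:
            print("unexpected status 0x{0:04x}".format(status))
    print(len(results), hex(status))          # 1 0x0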
MQTTclass.onConnect | (self, client, userdata, flags, rc) | Nach einem erfolgreichem connect des Client ``<basetopic>/<cmnd>`` abbonieren.
.. note::
If you want the client to subscribe to multiple topics then you can put them in a list of tuples.
Example::
client.subscribe([(‘topicName1’, 1),(‘topicName2’, 1)])
The format of the tuple is [(Topic Name, QoS Level)]
Parameters
----------
client:
The Client instance that is calling the callback
userdata:
user data of any type and can be set when creating a new client instance
flags : dict
flags is a dict that contains response flags from the broker
rc: int
The value of rc determines success or not
.. code::
0: Connection successful
1: Connection refused – incorrect protocol version
2: Connection refused – invalid client identifier
3: Connection refused – server unavailable
4: Connection refused – bad username or password
5: Connection refused – not authorised
6-255: Currently unused.
| Nach einem erfolgreichem connect des Client ``<basetopic>/<cmnd>`` abbonieren. | def onConnect(self, client, userdata, flags, rc):
"""Nach einem erfolgreichem connect des Client ``<basetopic>/<cmnd>`` abbonieren.
.. note::
If you want the client to subscribe to multiple topics then you can put them in a list of tuples.
Example::
client.subscribe([(‘topicName1’, 1),(‘topicName2’, 1)])
The format of the tuple is [(Topic Name, QoS Level)]
Parameters
----------
client:
The Client instance that is calling the callback
userdata:
user data of any type and can be set when creating a new client instance
flags : dict
flags is a dict that contains response flags from the broker
rc: int
The value of rc determines success or not
.. code::
0: Connection successful
1: Connection refused – incorrect protocol version
2: Connection refused – invalid client identifier
3: Connection refused – server unavailable
4: Connection refused – bad username or password
5: Connection refused – not authorised
6-255: Currently unused.
"""
if rc != 0: # pragma: no cover
self.info("mqtt.Connected with result code: " + str(rc))
else:
self.info("mqtt.Connected: {}:{}".format( self.config["host"], self.config["port"] ) )
# Subscribe to <basetopic>/<cmnd> and all sub topics
client.subscribe( "{basetopic}/{cmnd}/#".format( **self.defaults ) )
# nach dem starten den eigenen status abrufen
#self._mqttc.publish( "{basetopic}/{cmnd}/#".format( **self.defaults ), payload="0" )
self.signalStartup.send( { "onConnect": rc } ) | [
"def",
"onConnect",
"(",
"self",
",",
"client",
",",
"userdata",
",",
"flags",
",",
"rc",
")",
":",
"if",
"rc",
"!=",
"0",
":",
"# pragma: no cover",
"self",
".",
"info",
"(",
"\"mqtt.Connected with result code: \"",
"+",
"str",
"(",
"rc",
")",
")",
"else",
":",
"self",
".",
"info",
"(",
"\"mqtt.Connected: {}:{}\"",
".",
"format",
"(",
"self",
".",
"config",
"[",
"\"host\"",
"]",
",",
"self",
".",
"config",
"[",
"\"port\"",
"]",
")",
")",
"# Subscribe to <basetopic>/<cmnd> and all sub topics",
"client",
".",
"subscribe",
"(",
"\"{basetopic}/{cmnd}/#\"",
".",
"format",
"(",
"*",
"*",
"self",
".",
"defaults",
")",
")",
"# nach dem starten den eigenen status abrufen",
"#self._mqttc.publish( \"{basetopic}/{cmnd}/#\".format( **self.defaults ), payload=\"0\" )",
"self",
".",
"signalStartup",
".",
"send",
"(",
"{",
"\"onConnect\"",
":",
"rc",
"}",
")"
] | [
503,
4
] | [
546,
55
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ChatbotMessages.WriteChatMessage_NoPermission | ( self, user, command, commandName = False ) | return | Schreibe eine Nachricht bezüglich mangelnder Zugriffsrechte in den Chat | Schreibe eine Nachricht bezüglich mangelnder Zugriffsrechte in den Chat | def WriteChatMessage_NoPermission( self, user, command, commandName = False ):
''' Schreibe eine Nachricht bezüglich mangelnder Zugriffsrechte in den Chat '''
thisActionName = "WriteChatMessage_NoPermission"
UserDisplayName = self.Parent.GetDisplayName( user )
# auf Grund Kompatibilität
if commandName:
self.CommandName = commandName
else:
self.CommandName = command
self.Parent.SendStreamMessage( "/me : ⛔ {0}, du hast nicht die erforderlichen Rechte für das Kommando '{1}'. ⛔".format(UserDisplayName,
self.CommandName ) )
return | [
"def",
"WriteChatMessage_NoPermission",
"(",
"self",
",",
"user",
",",
"command",
",",
"commandName",
"=",
"False",
")",
":",
"thisActionName",
"=",
"\"WriteChatMessage_NoPermission\"",
"UserDisplayName",
"=",
"self",
".",
"Parent",
".",
"GetDisplayName",
"(",
"user",
")",
"# auf Grund Kompatibilität\r",
"if",
"commandName",
":",
"self",
".",
"CommandName",
"=",
"commandName",
"else",
":",
"self",
".",
"CommandName",
"=",
"command",
"self",
".",
"Parent",
".",
"SendStreamMessage",
"(",
"\"/me : ⛔ {0}, du hast nicht die erforderlichen Rechte für das Kommando '{1}'. ⛔\".form",
"a",
"t(User",
"D",
"isplayName, \r",
"",
"self",
".",
"CommandName",
")",
")",
"return"
] | [
155,
4
] | [
170,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
RawData.write_SubscriberDataLog | ( self, subtype = "", subplan = "", submonth = "", userDisplayName = "" ) | return | Schreibt die Subscriber-Daten in ein Logfile | Schreibt die Subscriber-Daten in ein Logfile | def write_SubscriberDataLog( self, subtype = "", subplan = "", submonth = "", userDisplayName = "" ):
''' Schreibt die Subscriber-Daten in ein Logfile '''
thisActionName = "write_SubscriberDataLog"
tmpText = "User = {0} ( SubType = {1} | SubPlan = {2} | SubMonth = {3} )".format(
userDisplayName,
str.upper( subtype ),
subplan,
submonth
)
text = str( '[' + myTime.TimeStampLog() + '] : ' + str( tmpText ) )
# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde
if self.LogFilesPath:
AppendDataToFile( self.SubscriberLogFile, text )
return | [
"def",
"write_SubscriberDataLog",
"(",
"self",
",",
"subtype",
"=",
"\"\"",
",",
"subplan",
"=",
"\"\"",
",",
"submonth",
"=",
"\"\"",
",",
"userDisplayName",
"=",
"\"\"",
")",
":",
"thisActionName",
"=",
"\"write_SubscriberDataLog\"",
"tmpText",
"=",
"\"User = {0} ( SubType = {1} | SubPlan = {2} | SubMonth = {3} )\"",
".",
"format",
"(",
"userDisplayName",
",",
"str",
".",
"upper",
"(",
"subtype",
")",
",",
"subplan",
",",
"submonth",
")",
"text",
"=",
"str",
"(",
"'['",
"+",
"myTime",
".",
"TimeStampLog",
"(",
")",
"+",
"'] : '",
"+",
"str",
"(",
"tmpText",
")",
")",
"# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde\r",
"if",
"self",
".",
"LogFilesPath",
":",
"AppendDataToFile",
"(",
"self",
".",
"SubscriberLogFile",
",",
"text",
")",
"return"
] | [
107,
4
] | [
123,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
PdfGenerator.__init__ | (self, variables:dict={}, autoRender:bool=True, config=None, filename: str=None ) | PDF vorbereiten.
Parameters
----------
variables: dict
zum ergänzen oder ändern von config.variables
_variables wird zum ersetzen von Schlüsselwörtern verwendet
autoRender: bool - True
Elemente sofort rendern oder nur zurückgeben
config: dict oder isp config
verwendet wird variables, templates und pdf
filename:
Den angegebenen Dateinamen verwenden und nicht aus den variables erzeugen
| PDF vorbereiten. | def __init__(self, variables:dict={}, autoRender:bool=True, config=None, filename: str=None ):
"""PDF vorbereiten.
Parameters
----------
variables: dict
zum ergänzen oder ändern von config.variables
_variables wird zum ersetzen von Schlüsselwörtern verwendet
autoRender: bool - True
Elemente sofort rendern oder nur zurückgeben
config: dict oder isp config
verwendet wird variables, templates und pdf
filename:
Den angegebenen Dateinamen verwenden und nicht aus den variables erzeugen
"""
# Konfiguration
if not config:
self._config = ispConfig( mqttlevel=logging.WARNING )
else:
self._config = config
# Defaultwerte der Klassse setzen
self.set_class_defaults()
# default _variables Angaben mit config.variables ergänzen
self._variables = dict_merge( self._variables, self._config.get( "variables", {} ) )
# zusätzliche angegebene variables ergänzen
self._variables = dict_merge( self._variables, variables )
# Dateiname ggf. aus den metadaten aufbereiten
if filename:
self._variables["filename"] = filename
self.autoRender = autoRender
# overlay und page layout setzen
self.set_page_defaults()
# plot defaults setzen
plt.rcParams.update( rcParams ) | [
"def",
"__init__",
"(",
"self",
",",
"variables",
":",
"dict",
"=",
"{",
"}",
",",
"autoRender",
":",
"bool",
"=",
"True",
",",
"config",
"=",
"None",
",",
"filename",
":",
"str",
"=",
"None",
")",
":",
"# Konfiguration",
"if",
"not",
"config",
":",
"self",
".",
"_config",
"=",
"ispConfig",
"(",
"mqttlevel",
"=",
"logging",
".",
"WARNING",
")",
"else",
":",
"self",
".",
"_config",
"=",
"config",
"# Defaultwerte der Klassse setzen",
"self",
".",
"set_class_defaults",
"(",
")",
"# default _variables Angaben mit config.variables ergänzen",
"self",
".",
"_variables",
"=",
"dict_merge",
"(",
"self",
".",
"_variables",
",",
"self",
".",
"_config",
".",
"get",
"(",
"\"variables\"",
",",
"{",
"}",
")",
")",
"# zusätzliche angegebene variables ergänzen",
"self",
".",
"_variables",
"=",
"dict_merge",
"(",
"self",
".",
"_variables",
",",
"variables",
")",
"# Dateiname ggf. aus den metadaten aufbereiten",
"if",
"filename",
":",
"self",
".",
"_variables",
"[",
"\"filename\"",
"]",
"=",
"filename",
"self",
".",
"autoRender",
"=",
"autoRender",
"# overlay und page layout setzen",
"self",
".",
"set_page_defaults",
"(",
")",
"# plot defaults setzen",
"plt",
".",
"rcParams",
".",
"update",
"(",
"rcParams",
")"
] | [
185,
4
] | [
229,
39
] | null | python | de | ['de', 'de', 'de'] | False | true | null |
|
read_csv_header | (path, delimiter=',') | Lies CSV Header ein
Arguments:
path (str): Pfad zur CSV-Datei
Returns:
header (list): CSV-Header
| Lies CSV Header ein | def read_csv_header(path, delimiter=','):
"""Lies CSV Header ein
Arguments:
path (str): Pfad zur CSV-Datei
Returns:
header (list): CSV-Header
"""
with open(path, 'r', newline='', encoding='utf-8') as f_in:
csv_reader = csv.reader(f_in, delimiter=delimiter)
return next(csv_reader) | [
"def",
"read_csv_header",
"(",
"path",
",",
"delimiter",
"=",
"','",
")",
":",
"with",
"open",
"(",
"path",
",",
"'r'",
",",
"newline",
"=",
"''",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f_in",
":",
"csv_reader",
"=",
"csv",
".",
"reader",
"(",
"f_in",
",",
"delimiter",
"=",
"delimiter",
")",
"return",
"next",
"(",
"csv_reader",
")"
] | [
41,
0
] | [
53,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
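A small round trip for the CSV helper above: write a file, then read back only its header row. Path and column names are arbitrary:

    import csv, os, tempfile

    path = os.path.join(tempfile.gettempdir(), "beispiel.csv")
    with open(path, "w", newline="", encoding="utf-8") as f_out:
        writer = csv.writer(f_out)
        writer.writerow(["id", "name", "wert"])   # header
        writer.writerow([1, "a", 3.14])

    with open(path, "r", newline="", encoding="utf-8") as f_in:
        header = next(csv.reader(f_in))           # what read_csv_header(path) returns
    print(header)                                 # ['id', 'name', 'wert']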
|
ImageTaggingCNN.build | (input_shape) | return Model(
inputs=input_layer,
outputs=[color_output, category_output],
name="imageTaggingCNN") | Baut das CNN zusammen
:3Tupel input_shape: Das Format des Input Tensors
:rtype: Model
| Baut das CNN zusammen
:3Tupel input_shape: Das Format des Input Tensors
:rtype: Model
| def build(input_shape):
""" Baut das CNN zusammen
:3Tupel input_shape: Das Format des Input Tensors
:rtype: Model
"""
imgCNN = ImageTaggingCNN()
input_layer = Input(shape=input_shape)
x = imgCNN._input_conv_layer(input_layer, 32)
x = imgCNN._residual_block(x, 32)
x = imgCNN._residual_block(x, 32)
x = imgCNN._downsampling_layer(x, 64)
x = imgCNN._residual_block(x, 64)
x = imgCNN._residual_block(x, 64)
x = imgCNN._downsampling_layer(x, 128)
x = imgCNN._residual_block(x, 128)
x = imgCNN._residual_block(x, 128)
x = imgCNN._downsampling_layer(x, 256)
x = imgCNN._residual_block(x, 256)
x = imgCNN._residual_block(x, 256)
x = AveragePooling2D(
pool_size=2,
padding='same')(x)
x = Flatten()(x)
color_output = imgCNN._classifier_block(x, 'color_output', 3)
category_output = imgCNN._classifier_block(x, 'category_output', 3)
return Model(
inputs=input_layer,
outputs=[color_output, category_output],
name="imageTaggingCNN") | [
"def",
"build",
"(",
"input_shape",
")",
":",
"imgCNN",
"=",
"ImageTaggingCNN",
"(",
")",
"input_layer",
"=",
"Input",
"(",
"shape",
"=",
"input_shape",
")",
"x",
"=",
"imgCNN",
".",
"_input_conv_layer",
"(",
"input_layer",
",",
"32",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"32",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"32",
")",
"x",
"=",
"imgCNN",
".",
"_downsampling_layer",
"(",
"x",
",",
"64",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"64",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"64",
")",
"x",
"=",
"imgCNN",
".",
"_downsampling_layer",
"(",
"x",
",",
"128",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"128",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"128",
")",
"x",
"=",
"imgCNN",
".",
"_downsampling_layer",
"(",
"x",
",",
"256",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"256",
")",
"x",
"=",
"imgCNN",
".",
"_residual_block",
"(",
"x",
",",
"256",
")",
"x",
"=",
"AveragePooling2D",
"(",
"pool_size",
"=",
"2",
",",
"padding",
"=",
"'same'",
")",
"(",
"x",
")",
"x",
"=",
"Flatten",
"(",
")",
"(",
"x",
")",
"color_output",
"=",
"imgCNN",
".",
"_classifier_block",
"(",
"x",
",",
"'color_output'",
",",
"3",
")",
"category_output",
"=",
"imgCNN",
".",
"_classifier_block",
"(",
"x",
",",
"'category_output'",
",",
"3",
")",
"return",
"Model",
"(",
"inputs",
"=",
"input_layer",
",",
"outputs",
"=",
"[",
"color_output",
",",
"category_output",
"]",
",",
"name",
"=",
"\"imageTaggingCNN\"",
")"
] | [
91,
4
] | [
120,
26
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
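A plausible next step after build() is compiling the two-headed model with one loss per named output. The miniature model below only reproduces that two-output structure so the snippet stays self-contained; the losses, optimizer and input shape are assumptions, not taken from the original project:

    from tensorflow.keras import Input, Model
    from tensorflow.keras.layers import Conv2D, GlobalAveragePooling2D, Dense

    inp = Input(shape=(64, 64, 3))
    x = Conv2D(16, 3, padding="same", activation="relu")(inp)
    x = GlobalAveragePooling2D()(x)
    color_output = Dense(3, activation="softmax", name="color_output")(x)
    category_output = Dense(3, activation="softmax", name="category_output")(x)

    model = Model(inputs=inp, outputs=[color_output, category_output])
    model.compile(
        optimizer="adam",
        loss={"color_output": "categorical_crossentropy",
              "category_output": "categorical_crossentropy"},
        metrics=["accuracy"],
    )
    model.summary()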
index | () | return render_template("index.html") | zeige dem Benutzer die HTML-Datei aus dem Ordner `templates` an | zeige dem Benutzer die HTML-Datei aus dem Ordner `templates` an | def index():
"""zeige dem Benutzer die HTML-Datei aus dem Ordner `templates` an"""
return render_template("index.html") | [
"def",
"index",
"(",
")",
":",
"return",
"render_template",
"(",
"\"index.html\"",
")"
] | [
80,
0
] | [
82,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
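For context, the view above only works inside a Flask app with a templates/ folder. A self-contained variant uses an inline template instead of index.html (route and template text are arbitrary):

    from flask import Flask, render_template_string

    app = Flask(__name__)

    @app.route("/")
    def index():
        # render_template("index.html") would load templates/index.html;
        # an inline template keeps the sketch runnable on its own
        return render_template_string("<h1>Hallo {{ wer }}</h1>", wer="Welt")

    if __name__ == "__main__":
        app.run(debug=True)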
ispCheckClass.normalize | ( self, normalize: str="diff" ) | Normalisiert checkField mit baseField
in self.image.array liegen anschließend die normalisierten Daten
Parameters
----------
normalize : str, optional
Art der Normalisierung. The default is "diff".
- none: keine Normalisierung durchführen
- diff: test / open
- prozent: (test - open) / open
Returns
-------
None.
| Normalisiert checkField mit baseField
in self.image.array liegen anschließend die normalisierten Daten
| def normalize( self, normalize: str="diff" ):
'''Normalisiert checkField mit baseField
in self.image.array liegen anschließend die normalisierten Daten
Parameters
----------
normalize : str, optional
Art der Normalisierung. The default is "diff".
- none: keine Normalisierung durchführen
- diff: test / open
- prozent: (test - open) / open
Returns
-------
None.
'''
# image.array als image.arrayOriginal merken
self.image.arrayOriginal = self.image.array.copy()
#print("### ispCheckClass.normalize", self.image, self.baseImage, normalize)
"""
if basefilename:
if self.debug:
print("---------------------------")
print("OpenImage: %s, min: %1.3f, max: %1.3f, DPMM: %1.3f, DPI: %1.3f, CAX-x: %1.3f CAX-y:%1.3f"
% (self.openfilename, np.amin(openImg.array), np.amax(openImg.array),
openImg.dpmm, openImg.dpi, openImg.cax.x, openImg.cax.y ) )
self.printMetaInfo( openImg.metadata )
if self.debug:
print("---------------------------")
print("CheckImage: %s, min: %1.3f, max: %1.3f, DPMM: %1.3f, DPI: %1.3f, CAX-x: %1.3f CAX-y:%1.3f"
% (testfilename, np.amin(checkImage.array), np.amax(checkImage.array),
checkImage.dpmm, checkImage.dpi, checkImage.cax.x, checkImage.cax.y ) )
self.printMetaInfo( checkImage.metadata )
"""
base = self.baseImage.array.copy()
check = self.image.array.copy()
if normalize == "diff":
# Beide Arrays um 0.000001 erhöhen und geschlossenes durch offene teilen
self.image.array = (check + 0.000001) / (base + 0.000001)
elif normalize == "prozent":
self.image.array = ( (check + 0.000001) - (base + 0.000001) ) / (base + 0.000001) | [
"def",
"normalize",
"(",
"self",
",",
"normalize",
":",
"str",
"=",
"\"diff\"",
")",
":",
"# image.array als image.arrayOriginal merken",
"self",
".",
"image",
".",
"arrayOriginal",
"=",
"self",
".",
"image",
".",
"array",
".",
"copy",
"(",
")",
"#print(\"### ispCheckClass.normalize\", self.image, self.baseImage, normalize)",
"\"\"\"\n if basefilename:\n\n if self.debug: \n print(\"---------------------------\")\n print(\"OpenImage: %s, min: %1.3f, max: %1.3f, DPMM: %1.3f, DPI: %1.3f, CAX-x: %1.3f CAX-y:%1.3f\" \n % (self.openfilename, np.amin(openImg.array), np.amax(openImg.array), \n openImg.dpmm, openImg.dpi, openImg.cax.x, openImg.cax.y ) )\n self.printMetaInfo( openImg.metadata )\n \n\n if self.debug: \n print(\"---------------------------\")\n print(\"CheckImage: %s, min: %1.3f, max: %1.3f, DPMM: %1.3f, DPI: %1.3f, CAX-x: %1.3f CAX-y:%1.3f\" \n % (testfilename, np.amin(checkImage.array), np.amax(checkImage.array), \n checkImage.dpmm, checkImage.dpi, checkImage.cax.x, checkImage.cax.y ) )\n self.printMetaInfo( checkImage.metadata )\n \n\n \"\"\"",
"base",
"=",
"self",
".",
"baseImage",
".",
"array",
".",
"copy",
"(",
")",
"check",
"=",
"self",
".",
"image",
".",
"array",
".",
"copy",
"(",
")",
"if",
"normalize",
"==",
"\"diff\"",
":",
"# Beide Arrays um 0.000001 erhöhen und geschlossenes durch offene teilen",
"self",
".",
"image",
".",
"array",
"=",
"(",
"check",
"+",
"0.000001",
")",
"/",
"(",
"base",
"+",
"0.000001",
")",
"elif",
"normalize",
"==",
"\"prozent\"",
":",
"self",
".",
"image",
".",
"array",
"=",
"(",
"(",
"check",
"+",
"0.000001",
")",
"-",
"(",
"base",
"+",
"0.000001",
")",
")",
"/",
"(",
"base",
"+",
"0.000001",
")"
] | [
73,
4
] | [
124,
93
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
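The two normalisation modes above are plain element-wise arithmetic; with bare numpy arrays (values invented) they look like this:

    import numpy as np

    base = np.array([[1.0, 2.0], [4.0, 5.0]])    # open/base field
    check = np.array([[1.1, 2.0], [3.6, 5.5]])   # measured field
    eps = 0.000001

    ratio = (check + eps) / (base + eps)                       # normalize == "diff"
    relative = ((check + eps) - (base + eps)) / (base + eps)   # normalize == "prozent"
    print(ratio.round(3))      # [[1.1 1. ] [0.9 1.1]]
    print(relative.round(3))   # [[ 0.1  0. ] [-0.1  0.1]]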
|
handle_connect | () | erhoehe die Anzahl der verbundenen Benutzer um 1 | erhoehe die Anzahl der verbundenen Benutzer um 1 | def handle_connect():
"""erhoehe die Anzahl der verbundenen Benutzer um 1"""
global verbundene_benutzer
verbundene_benutzer += 1 | [
"def",
"handle_connect",
"(",
")",
":",
"global",
"verbundene_benutzer",
"verbundene_benutzer",
"+=",
"1"
] | [
86,
0
] | [
89,
28
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
dicomClass.archive_loadSOPInstanceUID | ( self, SOPInstanceUID ) | return ds | Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv
Parameters
----------
SOPInstanceUID : str
Eine SOPInstanceUID.
Returns
-------
ds : TYPE
DESCRIPTION.
| Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv
Parameters
----------
SOPInstanceUID : str
Eine SOPInstanceUID. | def archive_loadSOPInstanceUID( self, SOPInstanceUID ):
'''Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv
Parameters
----------
SOPInstanceUID : str
Eine SOPInstanceUID.
Returns
-------
ds : TYPE
DESCRIPTION.
'''
ds = None
exists, filename = self.archive_hasSOPInstanceUID( SOPInstanceUID )
if exists:
try:
# mit force True einlesen um trotz fehlender headerdaten einzulesen
ds = dcmread(filename, force=True)
except:
# alle sonstigen Fehler abfangen
logger.error("Fehler beim lesen der DICOM Datei")
pass
if ds:
self.dicomData[ ds.SOPInstanceUID ] = ds
#print(ds.PatientID, ds.RadiationMachineName, ds.SOPInstanceUID)
return ds | [
"def",
"archive_loadSOPInstanceUID",
"(",
"self",
",",
"SOPInstanceUID",
")",
":",
"ds",
"=",
"None",
"exists",
",",
"filename",
"=",
"self",
".",
"archive_hasSOPInstanceUID",
"(",
"SOPInstanceUID",
")",
"if",
"exists",
":",
"try",
":",
"# mit force True einlesen um trotz fehlender headerdaten einzulesen",
"ds",
"=",
"dcmread",
"(",
"filename",
",",
"force",
"=",
"True",
")",
"except",
":",
"# alle sonstigen Fehler abfangen",
"logger",
".",
"error",
"(",
"\"Fehler beim lesen der DICOM Datei\"",
")",
"pass",
"if",
"ds",
":",
"self",
".",
"dicomData",
"[",
"ds",
".",
"SOPInstanceUID",
"]",
"=",
"ds",
"#print(ds.PatientID, ds.RadiationMachineName, ds.SOPInstanceUID)",
"return",
"ds"
] | [
823,
4
] | [
855,
17
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
get_job_schedules | () | Gibt alle angelegten Jobs mitsamt ihren Zeitplänen zurück.
| Gibt alle angelegten Jobs mitsamt ihren Zeitplänen zurück. | def get_job_schedules():
""" Gibt alle angelegten Jobs mitsamt ihren Zeitplänen zurück.
"""
with db.open_con() as con:
res = con.execute(
"""
SELECT DISTINCT job_id, job_name, schedule.type as s_type, date, time, group_concat(DISTINCT weekday) AS weekdays,
time_interval, delete_options.type as d_type, days, hours
FROM job
INNER JOIN schedule USING(schedule_id)
LEFT JOIN schedule_weekday USING(schedule_id)
INNER JOIN delete_options USING(delete_options_id)
GROUP BY(job_id)
""").fetchall()
return res | [
"def",
"get_job_schedules",
"(",
")",
":",
"with",
"db",
".",
"open_con",
"(",
")",
"as",
"con",
":",
"res",
"=",
"con",
".",
"execute",
"(",
"\"\"\"\n SELECT DISTINCT job_id, job_name, schedule.type as s_type, date, time, group_concat(DISTINCT weekday) AS weekdays, \n time_interval, delete_options.type as d_type, days, hours\n FROM job \n INNER JOIN schedule USING(schedule_id)\n LEFT JOIN schedule_weekday USING(schedule_id)\n INNER JOIN delete_options USING(delete_options_id)\n GROUP BY(job_id)\n \"\"\"",
")",
".",
"fetchall",
"(",
")",
"return",
"res"
] | [
14,
0
] | [
30,
18
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
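The schedule query above is a join plus a group_concat aggregate; a toy sqlite3 session with an invented two-table schema shows the same query shape:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.executescript("""
        CREATE TABLE job (job_id INTEGER, job_name TEXT, schedule_id INTEGER);
        CREATE TABLE schedule_weekday (schedule_id INTEGER, weekday TEXT);
        INSERT INTO job VALUES (1, 'report', 10);
        INSERT INTO schedule_weekday VALUES (10, 'mon'), (10, 'wed');
    """)
    rows = con.execute("""
        SELECT job_id, job_name, group_concat(DISTINCT weekday) AS weekdays
        FROM job LEFT JOIN schedule_weekday USING(schedule_id)
        GROUP BY job_id
    """).fetchall()
    print(rows)   # e.g. [(1, 'report', 'mon,wed')]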
|
checkMlc.doMT_8_02_3 | (self, fileData ) | return self._doLamellenpositioniergenauigkeit(fileData, md) | Lamellenpositioniergenauigkeit
Hysterese bei großem/kleinen Feld
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| Lamellenpositioniergenauigkeit
Hysterese bei großem/kleinen Feld | def doMT_8_02_3(self, fileData ):
"""Lamellenpositioniergenauigkeit
Hysterese bei großem/kleinen Feld
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
# metadata defaults vorbereiten
md = dict_merge( DotMap( {
"series_sort_values" : ['day'],
"series_groupby": ["day", "SeriesNumber"],
"manual": {
"filename": self.metadata.info["anleitung"],
"attrs": {"class":"layout-fill-width"},
},
"_leafPlot" : { "width" : 45, "height" : 45},
"_boxPlot" : { "width" : 90, "height" : 45},
"plotTitle" : "lfd:{lfd} - {Richtung}",
"table_fields" : [
{'field': 'lfd', 'label':'lfd', 'format':'{0:d}' },
{'field': 'Richtung', 'label':'Richtung', 'format':'{0}' },
{'field': 'Datum', 'label':'Datum', 'format':'{0:%d.%m.%Y %H:%M:%S}' },
{'field': 'fwxm.min', 'label':'FWXM<br>min', 'format':'{0:.3f}' },
{'field': 'fwxm.mean', 'label':'FWXM<br>mean', 'format':'{0:.3f}' },
{'field': 'fwxm.max', 'label':'FWXM<br>max', 'format':'{0:.3f}' },
{'field': 'fwxm.passed', 'label':'FWXM<br>passed' },
{'field': 'shift.min', 'label':'Shift<br>min', 'format':'{0:.3f}' },
{'field': 'shift.mean', 'label':'Shift<br>mean', 'format':'{0:.3f}' },
{'field': 'shift.max', 'label':'Shift<br>max', 'format':'{0:.3f}' },
{'field': 'shift.passed', 'label':'Shift<br>passed' },
],
"options":{
"leafs" : {
"from": 1,
"to" : 60
}
}
}), self.metadata )
# und mit den Angaben aus config (info) ergänzen / überschreiben
return self._doLamellenpositioniergenauigkeit(fileData, md) | [
"def",
"doMT_8_02_3",
"(",
"self",
",",
"fileData",
")",
":",
"# metadata defaults vorbereiten",
"md",
"=",
"dict_merge",
"(",
"DotMap",
"(",
"{",
"\"series_sort_values\"",
":",
"[",
"'day'",
"]",
",",
"\"series_groupby\"",
":",
"[",
"\"day\"",
",",
"\"SeriesNumber\"",
"]",
",",
"\"manual\"",
":",
"{",
"\"filename\"",
":",
"self",
".",
"metadata",
".",
"info",
"[",
"\"anleitung\"",
"]",
",",
"\"attrs\"",
":",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
"}",
",",
"}",
",",
"\"_leafPlot\"",
":",
"{",
"\"width\"",
":",
"45",
",",
"\"height\"",
":",
"45",
"}",
",",
"\"_boxPlot\"",
":",
"{",
"\"width\"",
":",
"90",
",",
"\"height\"",
":",
"45",
"}",
",",
"\"plotTitle\"",
":",
"\"lfd:{lfd} - {Richtung}\"",
",",
"\"table_fields\"",
":",
"[",
"{",
"'field'",
":",
"'lfd'",
",",
"'label'",
":",
"'lfd'",
",",
"'format'",
":",
"'{0:d}'",
"}",
",",
"{",
"'field'",
":",
"'Richtung'",
",",
"'label'",
":",
"'Richtung'",
",",
"'format'",
":",
"'{0}'",
"}",
",",
"{",
"'field'",
":",
"'Datum'",
",",
"'label'",
":",
"'Datum'",
",",
"'format'",
":",
"'{0:%d.%m.%Y %H:%M:%S}'",
"}",
",",
"{",
"'field'",
":",
"'fwxm.min'",
",",
"'label'",
":",
"'FWXM<br>min'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'fwxm.mean'",
",",
"'label'",
":",
"'FWXM<br>mean'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'fwxm.max'",
",",
"'label'",
":",
"'FWXM<br>max'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'fwxm.passed'",
",",
"'label'",
":",
"'FWXM<br>passed'",
"}",
",",
"{",
"'field'",
":",
"'shift.min'",
",",
"'label'",
":",
"'Shift<br>min'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'shift.mean'",
",",
"'label'",
":",
"'Shift<br>mean'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'shift.max'",
",",
"'label'",
":",
"'Shift<br>max'",
",",
"'format'",
":",
"'{0:.3f}'",
"}",
",",
"{",
"'field'",
":",
"'shift.passed'",
",",
"'label'",
":",
"'Shift<br>passed'",
"}",
",",
"]",
",",
"\"options\"",
":",
"{",
"\"leafs\"",
":",
"{",
"\"from\"",
":",
"1",
",",
"\"to\"",
":",
"60",
"}",
"}",
"}",
")",
",",
"self",
".",
"metadata",
")",
"# und mit den Angaben aus config (info) ergänzen / überschreiben",
"return",
"self",
".",
"_doLamellenpositioniergenauigkeit",
"(",
"fileData",
",",
"md",
")"
] | [
2214,
4
] | [
2266,
67
] | null | python | de | ['de', 'de', 'de'] | False | true | null |
qa_mlc._add_leaf_error_subplot | (self, ax: plt.Axes) | Überschreibt die ursprüngliche PicketFenceFunktion
Es werden jetzt beide (tolerance und action_tolerance) Linien gezeichnet
und das Chart hat jetzt bei UP_DOWN eine doppelte breite
| Überschreibt die ursprüngliche PicketFenceFunktion
Es werden jetzt beide (tolerance und action_tolerance) Linien gezeichnet
und das Chart hat jetzt bei UP_DOWN eine doppelte breite
| def _add_leaf_error_subplot(self, ax: plt.Axes):
"""Überschreibt die ursprüngliche PicketFenceFunktion
Es werden jetzt beide (tolerance und action_tolerance) Linien gezeichnet
und das Chart hat jetzt bei UP_DOWN eine doppelte breite
"""
"""Add a bar subplot showing the leaf error."""
tol_line_height = [self.settings.tolerance, self.settings.tolerance]
tol_line_width = [0, max(self.image.shape)]
atol_line_height = [self.settings.action_tolerance, self.settings.action_tolerance]
# make the new axis
divider = make_axes_locatable(ax)
if self.settings.orientation == UP_DOWN:
axtop = divider.append_axes('right', size=8, pad=1, sharey=ax)
else:
axtop = divider.append_axes('bottom', size=2, pad=1, sharex=ax)
# get leaf positions, errors, standard deviation, and leaf numbers
# error_plot_positions, error_means, error_stds, mlc_leaves
pos, mean, stds, leaf_nums = self.pickets.error_hist()
#print( "leaf_nums", pos, vals, err, leaf_nums)
leafs = []
for l in leaf_nums:
# image muss up/down getauscht werden, deshalb auch die MLC Nummern ändern
leafs.append( l[1]-60 )
#ax2 = axtop.twiny() # instantiate a second axes that shares the same x-axis
#print(leaf_nums)
# plot the leaf errors as a bar plot
if self.settings.orientation == UP_DOWN:
# ohne xerr
axtop.barh(pos, mean, height=self.pickets[0].sample_width * 2, alpha=0.4, align='center', tick_label=leafs)
#axtop.barh(pos, mean, xerr=stds, height=self.pickets[0].sample_width * 2, alpha=0.4, align='center')
# plot the tolerance line(s)
# TODO: replace .plot() calls with .axhline when mpld3 fixes funtionality
axtop.plot(tol_line_height, tol_line_width, 'r-', linewidth=3)
if self.settings.action_tolerance is not None:
axtop.plot(atol_line_height, tol_line_width, 'y-', linewidth=3)
# reset xlims to comfortably include the max error or tolerance value
axtop.set_xlim([0, max(max(mean), self.settings.tolerance) + 0.1])
#axtop.tick_params( 'y', colors='r' )
else:
# ohne yerr
axtop.barh(pos, mean, height=self.pickets[0].sample_width * 2, alpha=0.4, align='center', tick_label=leafs)
#axtop.bar(pos, mean, yerr=stds, width=self.pickets[0].sample_width * 2, alpha=0.4, align='center')
axtop.plot(tol_line_width, tol_line_height,
'r-', linewidth=3)
if self.settings.action_tolerance is not None:
axtop.plot(tol_line_width, tol_line_height, 'y-', linewidth=3)
axtop.set_ylim([0, max(max(mean), self.settings.tolerance) + 0.1])
# add formatting to axis
#axtop.grid(True)
axtop.set_title("Average Error (mm)") | [
"def",
"_add_leaf_error_subplot",
"(",
"self",
",",
"ax",
":",
"plt",
".",
"Axes",
")",
":",
"\"\"\"Add a bar subplot showing the leaf error.\"\"\"",
"tol_line_height",
"=",
"[",
"self",
".",
"settings",
".",
"tolerance",
",",
"self",
".",
"settings",
".",
"tolerance",
"]",
"tol_line_width",
"=",
"[",
"0",
",",
"max",
"(",
"self",
".",
"image",
".",
"shape",
")",
"]",
"atol_line_height",
"=",
"[",
"self",
".",
"settings",
".",
"action_tolerance",
",",
"self",
".",
"settings",
".",
"action_tolerance",
"]",
"# make the new axis",
"divider",
"=",
"make_axes_locatable",
"(",
"ax",
")",
"if",
"self",
".",
"settings",
".",
"orientation",
"==",
"UP_DOWN",
":",
"axtop",
"=",
"divider",
".",
"append_axes",
"(",
"'right'",
",",
"size",
"=",
"8",
",",
"pad",
"=",
"1",
",",
"sharey",
"=",
"ax",
")",
"else",
":",
"axtop",
"=",
"divider",
".",
"append_axes",
"(",
"'bottom'",
",",
"size",
"=",
"2",
",",
"pad",
"=",
"1",
",",
"sharex",
"=",
"ax",
")",
"# get leaf positions, errors, standard deviation, and leaf numbers",
"# error_plot_positions, error_means, error_stds, mlc_leaves",
"pos",
",",
"mean",
",",
"stds",
",",
"leaf_nums",
"=",
"self",
".",
"pickets",
".",
"error_hist",
"(",
")",
"#print( \"leaf_nums\", pos, vals, err, leaf_nums)",
"leafs",
"=",
"[",
"]",
"for",
"l",
"in",
"leaf_nums",
":",
"# image muss up/down getauscht werden, deshalb auch die MLC Nummern ändern",
"leafs",
".",
"append",
"(",
"l",
"[",
"1",
"]",
"-",
"60",
")",
"#ax2 = axtop.twiny() # instantiate a second axes that shares the same x-axis",
"#print(leaf_nums)",
"# plot the leaf errors as a bar plot",
"if",
"self",
".",
"settings",
".",
"orientation",
"==",
"UP_DOWN",
":",
"# ohne xerr",
"axtop",
".",
"barh",
"(",
"pos",
",",
"mean",
",",
"height",
"=",
"self",
".",
"pickets",
"[",
"0",
"]",
".",
"sample_width",
"*",
"2",
",",
"alpha",
"=",
"0.4",
",",
"align",
"=",
"'center'",
",",
"tick_label",
"=",
"leafs",
")",
"#axtop.barh(pos, mean, xerr=stds, height=self.pickets[0].sample_width * 2, alpha=0.4, align='center')",
"# plot the tolerance line(s)",
"# TODO: replace .plot() calls with .axhline when mpld3 fixes funtionality",
"axtop",
".",
"plot",
"(",
"tol_line_height",
",",
"tol_line_width",
",",
"'r-'",
",",
"linewidth",
"=",
"3",
")",
"if",
"self",
".",
"settings",
".",
"action_tolerance",
"is",
"not",
"None",
":",
"axtop",
".",
"plot",
"(",
"atol_line_height",
",",
"tol_line_width",
",",
"'y-'",
",",
"linewidth",
"=",
"3",
")",
"# reset xlims to comfortably include the max error or tolerance value",
"axtop",
".",
"set_xlim",
"(",
"[",
"0",
",",
"max",
"(",
"max",
"(",
"mean",
")",
",",
"self",
".",
"settings",
".",
"tolerance",
")",
"+",
"0.1",
"]",
")",
"#axtop.tick_params( 'y', colors='r' )",
"else",
":",
"# ohne yerr",
"axtop",
".",
"barh",
"(",
"pos",
",",
"mean",
",",
"height",
"=",
"self",
".",
"pickets",
"[",
"0",
"]",
".",
"sample_width",
"*",
"2",
",",
"alpha",
"=",
"0.4",
",",
"align",
"=",
"'center'",
",",
"tick_label",
"=",
"leafs",
")",
"#axtop.bar(pos, mean, yerr=stds, width=self.pickets[0].sample_width * 2, alpha=0.4, align='center')",
"axtop",
".",
"plot",
"(",
"tol_line_width",
",",
"tol_line_height",
",",
"'r-'",
",",
"linewidth",
"=",
"3",
")",
"if",
"self",
".",
"settings",
".",
"action_tolerance",
"is",
"not",
"None",
":",
"axtop",
".",
"plot",
"(",
"tol_line_width",
",",
"tol_line_height",
",",
"'y-'",
",",
"linewidth",
"=",
"3",
")",
"axtop",
".",
"set_ylim",
"(",
"[",
"0",
",",
"max",
"(",
"max",
"(",
"mean",
")",
",",
"self",
".",
"settings",
".",
"tolerance",
")",
"+",
"0.1",
"]",
")",
"# add formatting to axis",
"#axtop.grid(True)",
"axtop",
".",
"set_title",
"(",
"\"Average Error (mm)\"",
")"
] | [
575,
4
] | [
636,
45
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
Zeitrechnung.name | (self) | return self.__name | Der Name der Zeitrechnung
:return str | Der Name der Zeitrechnung | def name(self) -> str:
"""Der Name der Zeitrechnung
:return str"""
return self.__name | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"__name"
] | [
54,
4
] | [
58,
26
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
DicomImage.doRescaleSlope | ( self ) | RescaleSlope anwenden wenn es ein RT Image Storage ist
Wird nur durchgeführt wenn self.isRescaled false ist und setzt isRescaled
| RescaleSlope anwenden wenn es ein RT Image Storage ist
Wird nur durchgeführt wenn self.isRescaled false ist und setzt isRescaled
| def doRescaleSlope( self ):
""" RescaleSlope anwenden wenn es ein RT Image Storage ist
Wird nur durchgeführt wenn self.isRescaled false ist und setzt isRescaled
"""
if not self.isRescaled and self.infos["SOPClassUID"] == 'RT Image Storage':
#print("doRescaleSlope", self.base_path, self.metadata.RescaleSlope )
self.array = self.array * self.metadata.RescaleSlope
self.isRescaled = True | [
"def",
"doRescaleSlope",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"isRescaled",
"and",
"self",
".",
"infos",
"[",
"\"SOPClassUID\"",
"]",
"==",
"'RT Image Storage'",
":",
"#print(\"doRescaleSlope\", self.base_path, self.metadata.RescaleSlope )",
"self",
".",
"array",
"=",
"self",
".",
"array",
"*",
"self",
".",
"metadata",
".",
"RescaleSlope",
"self",
".",
"isRescaled",
"=",
"True"
] | [
353,
4
] | [
361,
34
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
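Applying RescaleSlope once is a single multiplication guarded by a flag; reduced to plain numpy (array, slope value and flag are stand-ins for the DICOM metadata):

    import numpy as np

    array = np.array([[100, 200], [300, 400]], dtype=float)   # raw pixel values
    rescale_slope = 0.0753                                     # hypothetical RescaleSlope
    is_rescaled = False

    if not is_rescaled:
        array = array * rescale_slope   # apply the slope exactly once
        is_rescaled = True
    print(array)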
|
PdfGenerator.textFile | ( self, filename:str=None, area:dict={}, attrs:dict={}, render=None, replaceNewLine=False ) | r"""Lädt aus self._data["resources"] eine Datei und zeigt sie wie bei add_text an.
Bei der Dateiendung .txt wird eine Ersetztung von ``\n`` zu ``<br>`` vorgenommen
Parameters
----------
filename : str
Der zu ladende Dateiname
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
replaceNewLine : bool
nur doppelte ``\n\n`` durch ``<br>`` ersetzten oder alle newLine ``\n`` ersetzen
Returns
-------
element_html: str
HTML des erzeugten Elements
| r"""Lädt aus self._data["resources"] eine Datei und zeigt sie wie bei add_text an. | def textFile( self, filename:str=None, area:dict={}, attrs:dict={}, render=None, replaceNewLine=False ):
r"""Lädt aus self._data["resources"] eine Datei und zeigt sie wie bei add_text an.
Bei der Dateiendung .txt wird eine Ersetztung von ``\n`` zu ``<br>`` vorgenommen
Parameters
----------
filename : str
Der zu ladende Dateiname
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
replaceNewLine : bool
nur doppelte ``\n\n`` durch ``<br>`` ersetzten oder alle newLine ``\n`` ersetzen
Returns
-------
element_html: str
HTML des erzeugten Elements
"""
if not filename:
return
text = None
filepath = osp.join( self._variables["resources"], filename )
if osp.exists( filepath ):
with open( filepath, 'r', encoding="utf-8") as myfile:
text = myfile.read()
if text:
root, ext = osp.splitext( filename )
if ext.lower() == ".txt":
replaceNewLine = True
elif ext.lower() == ".md":
return self.markdown( text, area, attrs, render )
return self._text( text, area, attrs, render, replaceNewLine ) | [
"def",
"textFile",
"(",
"self",
",",
"filename",
":",
"str",
"=",
"None",
",",
"area",
":",
"dict",
"=",
"{",
"}",
",",
"attrs",
":",
"dict",
"=",
"{",
"}",
",",
"render",
"=",
"None",
",",
"replaceNewLine",
"=",
"False",
")",
":",
"if",
"not",
"filename",
":",
"return",
"text",
"=",
"None",
"filepath",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"_variables",
"[",
"\"resources\"",
"]",
",",
"filename",
")",
"if",
"osp",
".",
"exists",
"(",
"filepath",
")",
":",
"with",
"open",
"(",
"filepath",
",",
"'r'",
",",
"encoding",
"=",
"\"utf-8\"",
")",
"as",
"myfile",
":",
"text",
"=",
"myfile",
".",
"read",
"(",
")",
"if",
"text",
":",
"root",
",",
"ext",
"=",
"osp",
".",
"splitext",
"(",
"filename",
")",
"if",
"ext",
".",
"lower",
"(",
")",
"==",
"\".txt\"",
":",
"replaceNewLine",
"=",
"True",
"elif",
"ext",
".",
"lower",
"(",
")",
"==",
"\".md\"",
":",
"return",
"self",
".",
"markdown",
"(",
"text",
",",
"area",
",",
"attrs",
",",
"render",
")",
"return",
"self",
".",
"_text",
"(",
"text",
",",
"area",
",",
"attrs",
",",
"render",
",",
"replaceNewLine",
")"
] | [
983,
4
] | [
1023,
74
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
DatumsNotation.convert | (self, *args, **kwargs) | Konvertiert eine Angabe eines Datums in einer gewissen Notation in ein Datum | Konvertiert eine Angabe eines Datums in einer gewissen Notation in ein Datum | def convert(self, *args, **kwargs) -> "Datum":
"""Konvertiert eine Angabe eines Datums in einer gewissen Notation in ein Datum"""
pass | [
"def",
"convert",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"\"Datum\"",
":",
"pass"
] | [
8,
4
] | [
10,
12
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
checkField.doJT_7_2 | (self, fileData) | return self._doField_one2n(fileData, md=md ) | Jahrestest: 7.2. ()
Abhängigkeit der Kalibrierfaktoren von der Monitorrate
| Jahrestest: 7.2. ()
Abhängigkeit der Kalibrierfaktoren von der Monitorrate
| def doJT_7_2(self, fileData):
"""Jahrestest: 7.2. ()
Abhängigkeit der Kalibrierfaktoren von der Monitorrate
"""
# place for testing parameters
md = dict_merge( DotMap( {
} ), self.metadata )
return self._doField_one2n(fileData, md=md ) | [
"def",
"doJT_7_2",
"(",
"self",
",",
"fileData",
")",
":",
"# place for testing parameters",
"md",
"=",
"dict_merge",
"(",
"DotMap",
"(",
"{",
"}",
")",
",",
"self",
".",
"metadata",
")",
"return",
"self",
".",
"_doField_one2n",
"(",
"fileData",
",",
"md",
"=",
"md",
")"
] | [
629,
4
] | [
639,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
checkMlc.doMT_VMAT_0_2 | ( self, fileData ) | return self._doMLC_VMAT( fileData, overrideMD=md, withOffsets=True ) | PicketFence statisch eines 80x100 großen Feldes
Auswertung wie in VMAT 1.1 und 1.2 Ergebnisse in einer Tabelle
Toleranz Angaben in der Config mit value, da value Werte für analyze benötigt werden
| PicketFence statisch eines 80x100 großen Feldes
Auswertung wie in VMAT 1.1 und 1.2 Ergebnisse in einer Tabelle
Toleranz Angaben in der Config mit value, da value Werte für analyze benötigt werden | def doMT_VMAT_0_2( self, fileData ):
"""PicketFence statisch eines 80x100 großen Feldes
Auswertung wie in VMAT 1.1 und 1.2 Ergebnisse in einer Tabelle
Toleranz Angaben in der Config mit value, da value Werte für analyze benötigt werden
"""
md = {
"_imgSize" : {"width" : 45, "height" : 55},
"_imgField": {"border": 10 },
"_chartSize" : { "width" : 90, "height" : 55}
}
return self._doMLC_VMAT( fileData, overrideMD=md, withOffsets=True ) | [
"def",
"doMT_VMAT_0_2",
"(",
"self",
",",
"fileData",
")",
":",
"md",
"=",
"{",
"\"_imgSize\"",
":",
"{",
"\"width\"",
":",
"45",
",",
"\"height\"",
":",
"55",
"}",
",",
"\"_imgField\"",
":",
"{",
"\"border\"",
":",
"10",
"}",
",",
"\"_chartSize\"",
":",
"{",
"\"width\"",
":",
"90",
",",
"\"height\"",
":",
"55",
"}",
"}",
"return",
"self",
".",
"_doMLC_VMAT",
"(",
"fileData",
",",
"overrideMD",
"=",
"md",
",",
"withOffsets",
"=",
"True",
")"
] | [
2512,
4
] | [
2523,
76
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Argument.text_sentiment | (self) | return tools.sentiment_clean(self.text_raw) | Text für die Sentiment Analyse | Text für die Sentiment Analyse | def text_sentiment(self):
"""Text für die Sentiment Analyse"""
return tools.sentiment_clean(self.text_raw) | [
"def",
"text_sentiment",
"(",
"self",
")",
":",
"return",
"tools",
".",
"sentiment_clean",
"(",
"self",
".",
"text_raw",
")"
] | [
72,
4
] | [
74,
51
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.game_EndHeist | (self) | return | Beende das aktuelle Spiel | Beende das aktuelle Spiel | def game_EndHeist(self):
''' Beende das aktuelle Spiel '''
thisActionName = "game_EndHeist"
winnerData = []
# Daten aller Spieler ermitteln
allPlayerData = self.DB_get_PlayersData()
# Anzahl der Spieler
numberOfPlayers = len(allPlayerData)
# Mindest Spieleranzahl wurde nicht erreicht - Beenden
if numberOfPlayers < self.Settings.Game_Min_Participant:
# Nachricht in den Chat schreiben
self.WriteMessage_NotEnoughPlayer(allPlayerData=allPlayerData)
# Nachricht ins Log schreiben
self.Logger.WriteLog(
" {0} - Unzureichende Anzahl Spieler".format(thisActionName))
# Genügend Spieler vorhanden
else:
# Intro Nachricht in den Chat schreiben
self.Writemessage_GameStartIntroMessage()
# Gewinner ermitteln
for singlePlayerData in allPlayerData:
# Zufällig über Tod oder Leben entscheiden
result = int(self.Parent.GetRandom(0, 2))
# Spieler hat überlebt
if result == 1:
# Spieler in Gewinnerliste übernehmen
winnerData.append(singlePlayerData)
# Anzahl der Gewinner
numberOfWinner = len(winnerData)
# Prozentuale Anzahl der Gewinner berechnen und abrunden
probability = int(float(float(numberOfWinner) /
float(numberOfPlayers) * 100))
# Ausgabe des Textes für den Spielausgang
self.WriteMessage_GameResult(percentage=probability)
# Gewinner auszahlen
resultData = self.game_PayReward(winnerdata=winnerData)
# Nur in den Chat schreiben, wenn ein Teilnehmer überlebt hat
if not probability == 0:
# Auszahlungsdetails in den Chat schreiben
self.WriteMessage_GamePayout(payoutdata=resultData)
# Nachricht ins Log schreiben
self.Logger.WriteLog(
" {0} - Spiel beendet, {1} Prozent Gewinner".format(
thisActionName,
probability
))
# Zeitstempel zurücksetzen
self.GameStartTimestamp = ""
# Ziel zurücksetzen
self.GameTargetName = ""
# Zeitstempel für Cooldown-Benachrichtigung setzen
self.LastGameFinishTimestamp = time.time()
# Setze Cooldown-Zeit bevor das Spiel neu gestartet werden kann
self.CD.AddCooldown(scriptname=self.ScriptName,
command=self.ClassName,
cooldownTime=int(self.Settings.Game_Cooldown_Time)
)
return | [
"def",
"game_EndHeist",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"game_EndHeist\"",
"winnerData",
"=",
"[",
"]",
"# Daten aller Spieler ermitteln\r",
"allPlayerData",
"=",
"self",
".",
"DB_get_PlayersData",
"(",
")",
"# Anzahl der Spieler\r",
"numberOfPlayers",
"=",
"len",
"(",
"allPlayerData",
")",
"# Mindest Spieleranzahl wurde nicht erreicht - Beenden\r",
"if",
"numberOfPlayers",
"<",
"self",
".",
"Settings",
".",
"Game_Min_Participant",
":",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"WriteMessage_NotEnoughPlayer",
"(",
"allPlayerData",
"=",
"allPlayerData",
")",
"# Nachricht ins Log schreiben\r",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" {0} - Unzureichende Anzahl Spieler\"",
".",
"format",
"(",
"thisActionName",
")",
")",
"# Genügend Spieler vorhanden\r",
"else",
":",
"# Intro Nachricht in den Chat schreiben\r",
"self",
".",
"Writemessage_GameStartIntroMessage",
"(",
")",
"# Gewinner ermitteln\r",
"for",
"singlePlayerData",
"in",
"allPlayerData",
":",
"# Zufällig über Tod oder Leben entscheiden\r",
"result",
"=",
"int",
"(",
"self",
".",
"Parent",
".",
"GetRandom",
"(",
"0",
",",
"2",
")",
")",
"# Spieler hat überlebt\r",
"if",
"result",
"==",
"1",
":",
"# Spieler in Gewinnerliste übernehmen\r",
"winnerData",
".",
"append",
"(",
"singlePlayerData",
")",
"# Anzahl der Gewinner\r",
"numberOfWinner",
"=",
"len",
"(",
"winnerData",
")",
"# Prozentuale Anzahl der Gewinner berechnen und abrunden\r",
"probability",
"=",
"int",
"(",
"float",
"(",
"float",
"(",
"numberOfWinner",
")",
"/",
"float",
"(",
"numberOfPlayers",
")",
"*",
"100",
")",
")",
"# Ausgabe des Textes für den Spielausgang\r",
"self",
".",
"WriteMessage_GameResult",
"(",
"percentage",
"=",
"probability",
")",
"# Gewinner auszahlen\r",
"resultData",
"=",
"self",
".",
"game_PayReward",
"(",
"winnerdata",
"=",
"winnerData",
")",
"# Nur in den Chat schreiben, wenn ein Teilnehmer überlebt hat\r",
"if",
"not",
"probability",
"==",
"0",
":",
"# Auszahlungsdetails in den Chat schreiben\r",
"self",
".",
"WriteMessage_GamePayout",
"(",
"payoutdata",
"=",
"resultData",
")",
"# Nachricht ins Log schreiben\r",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" {0} - Spiel beendet, {1} Prozent Gewinner\"",
".",
"format",
"(",
"thisActionName",
",",
"probability",
")",
")",
"# Zeitstempel zurücksetzen\r",
"self",
".",
"GameStartTimestamp",
"=",
"\"\"",
"# Ziel zurücksetzen\r",
"self",
".",
"GameTargetName",
"=",
"\"\"",
"# Zeitstempel für Cooldown-Benachrichtigung setzen\r",
"self",
".",
"LastGameFinishTimestamp",
"=",
"time",
".",
"time",
"(",
")",
"# Setze Cooldown-Zeit bevor das Spiel neu gestartet werden kann\r",
"self",
".",
"CD",
".",
"AddCooldown",
"(",
"scriptname",
"=",
"self",
".",
"ScriptName",
",",
"command",
"=",
"self",
".",
"ClassName",
",",
"cooldownTime",
"=",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Cooldown_Time",
")",
")",
"return"
] | [
1607,
4
] | [
1684,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
DataTable.filter | (self, e: ColExpr) | return self | Ich bin eine gute Beschreibung | Ich bin eine gute Beschreibung | def filter(self, e: ColExpr) -> Self:
'''Ich bin eine gute Beschreibung'''
# Collect indices for which e is True
idx = e._collect(self)
if isinstance(idx, DataColumn): idx = idx._data
# Return rows where idx == True
for k,v in self._data.items():
self._data[k] = v[idx]
return self | [
"def",
"filter",
"(",
"self",
",",
"e",
":",
"ColExpr",
")",
"->",
"Self",
":",
"# Collect indices for which e is True",
"idx",
"=",
"e",
".",
"_collect",
"(",
"self",
")",
"if",
"isinstance",
"(",
"idx",
",",
"DataColumn",
")",
":",
"idx",
"=",
"idx",
".",
"_data",
"# Return rows where idx == True",
"for",
"k",
",",
"v",
"in",
"self",
".",
"_data",
".",
"items",
"(",
")",
":",
"self",
".",
"_data",
"[",
"k",
"]",
"=",
"v",
"[",
"idx",
"]",
"return",
"self"
] | [
109,
4
] | [
117,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
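The filter above boils down to indexing every column with one boolean mask; the same mechanism with plain numpy arrays (column names and values invented):

    import numpy as np

    data = {
        "a": np.array([1, 2, 3, 4]),
        "b": np.array([10.0, 20.0, 30.0, 40.0]),
    }
    idx = data["a"] > 2          # boolean mask, as a column expression would produce

    for k, v in data.items():
        data[k] = v[idx]         # keep only rows where the mask is True
    print(data)                  # {'a': array([3, 4]), 'b': array([30., 40.])}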
calculate_subtract | (values: dict, data: StepData) | Die jeweiligen Werte, die in subtract stehen, werden von den Werten, die in key stehen, subtrahiert.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Die jeweiligen Werte, die in subtract stehen, werden von den Werten, die in key stehen, subtrahiert. | def calculate_subtract(values: dict, data: StepData):
"""Die jeweiligen Werte, die in subtract stehen, werden von den Werten, die in key stehen, subtrahiert.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
_bi_calculate(values, data, operator.sub) | [
"def",
"calculate_subtract",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"_bi_calculate",
"(",
"values",
",",
"data",
",",
"operator",
".",
"sub",
")"
] | [
239,
0
] | [
245,
45
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
Geometry.detect_airgaps | (self, center, startangle, endangle, atol) | return airgaps | Die Funktion sucht Luftspalt-Kandidaten und liefert eine Liste
von Möglichkeiten mit jeweils einem minimalen und einem maximalen
Radius als Begrenzung des Luftspalts.
| Die Funktion sucht Luftspalt-Kandidaten und liefert eine Liste
von Möglichkeiten mit jeweils einem minimalen und einem maximalen
Radius als Begrenzung des Luftspalts.
| def detect_airgaps(self, center, startangle, endangle, atol):
""" Die Funktion sucht Luftspalt-Kandidaten und liefert eine Liste
von Möglichkeiten mit jeweils einem minimalen und einem maximalen
Radius als Begrenzung des Luftspalts.
"""
gaplist = []
for e in self.elements(Shape):
if not self.is_border_line(center, startangle, endangle, e, atol):
gaplist += [e.minmax_from_center(center)]
gaplist.sort()
self.min_radius = gaplist[0][0]
self.max_radius = gaplist[-1][1]
airgaps = []
min_radius = self.min_radius + 1.0
cur_radius = gaplist[0][1]
max_radius = self.max_radius - 1.0
for g in gaplist:
if greater(g[0], cur_radius) and \
greater(cur_radius, min_radius) and \
less(g[0], max_radius):
airgaps.append((cur_radius, g[0]))
cur_radius = max(cur_radius, g[1])
return airgaps | [
"def",
"detect_airgaps",
"(",
"self",
",",
"center",
",",
"startangle",
",",
"endangle",
",",
"atol",
")",
":",
"gaplist",
"=",
"[",
"]",
"for",
"e",
"in",
"self",
".",
"elements",
"(",
"Shape",
")",
":",
"if",
"not",
"self",
".",
"is_border_line",
"(",
"center",
",",
"startangle",
",",
"endangle",
",",
"e",
",",
"atol",
")",
":",
"gaplist",
"+=",
"[",
"e",
".",
"minmax_from_center",
"(",
"center",
")",
"]",
"gaplist",
".",
"sort",
"(",
")",
"self",
".",
"min_radius",
"=",
"gaplist",
"[",
"0",
"]",
"[",
"0",
"]",
"self",
".",
"max_radius",
"=",
"gaplist",
"[",
"-",
"1",
"]",
"[",
"1",
"]",
"airgaps",
"=",
"[",
"]",
"min_radius",
"=",
"self",
".",
"min_radius",
"+",
"1.0",
"cur_radius",
"=",
"gaplist",
"[",
"0",
"]",
"[",
"1",
"]",
"max_radius",
"=",
"self",
".",
"max_radius",
"-",
"1.0",
"for",
"g",
"in",
"gaplist",
":",
"if",
"greater",
"(",
"g",
"[",
"0",
"]",
",",
"cur_radius",
")",
"and",
"greater",
"(",
"cur_radius",
",",
"min_radius",
")",
"and",
"less",
"(",
"g",
"[",
"0",
"]",
",",
"max_radius",
")",
":",
"airgaps",
".",
"append",
"(",
"(",
"cur_radius",
",",
"g",
"[",
"0",
"]",
")",
")",
"cur_radius",
"=",
"max",
"(",
"cur_radius",
",",
"g",
"[",
"1",
"]",
")",
"return",
"airgaps"
] | [
2506,
4
] | [
2535,
22
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
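Stripped of the surrounding geometry classes (Shape, minmax_from_center), the air-gap search in detect_airgaps is a scan over sorted (min, max) radius intervals; the standalone sketch below (function name and sample data are made up) reproduces that scan:

def detect_gaps(intervals, tol=1.0):
    """Return (inner, outer) radius pairs that no interval covers.

    intervals: list of (rmin, rmax) tuples, e.g. one per shape element.
    tol: margin that excludes gaps touching the overall min/max radius.
    """
    intervals = sorted(intervals)
    gaps = []
    min_radius = intervals[0][0] + tol
    max_radius = intervals[-1][1] - tol
    cur_radius = intervals[0][1]
    for rmin, rmax in intervals:
        if rmin > cur_radius and cur_radius > min_radius and rmin < max_radius:
            gaps.append((cur_radius, rmin))
        cur_radius = max(cur_radius, rmax)
    return gaps

print(detect_gaps([(5, 20), (6, 18), (22, 40), (23, 41)]))  # [(20, 22)]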
 WebAppTest.test_other_Tagging | (self) |  | Gibt eine Liste aller Testbeschreibungen (config) mit Anleitungen
 | Gibt eine Liste aller Testbeschreibungen (config) mit Anleitungen
| def test_other_Tagging(self):
    ''' Gibt eine Liste aller Testbeschreibungen (config) mit Anleitungen
'''
url = '/api/gqa/tagging'
# als json
params = {}
response = self.app.get( url, query_string = params )
self.assertEqual(response.status_code, 200, "Api Rückgabe fehlerhaft")
# als html
params = {
"art" : "sum",
"format": "html"
}
response = self.app.get( url, query_string = params )
self.assertEqual(response.status_code, 200, "Api Rückgabe fehlerhaft")
# als json
params = {
"art" : "test",
"format": "html"
}
response = self.app.get( url, query_string = params )
self.assertEqual(response.status_code, 200, "Api Rückgabe fehlerhaft")
# als json
params = {
"art" : "tags",
"format": "html"
}
response = self.app.get( url, query_string = params )
self.assertEqual(response.status_code, 200, "Api Rückgabe fehlerhaft") | [
"def",
"test_other_Tagging",
"(",
"self",
")",
":",
"url",
"=",
"'/api/gqa/tagging'",
"# als json",
"params",
"=",
"{",
"}",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"url",
",",
"query_string",
"=",
"params",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Rückgabe fehlerhaft\")",
" ",
"# als html",
"params",
"=",
"{",
"\"art\"",
":",
"\"sum\"",
",",
"\"format\"",
":",
"\"html\"",
"}",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"url",
",",
"query_string",
"=",
"params",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Rückgabe fehlerhaft\")",
" ",
"# als json",
"params",
"=",
"{",
"\"art\"",
":",
"\"test\"",
",",
"\"format\"",
":",
"\"html\"",
"}",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"url",
",",
"query_string",
"=",
"params",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Rückgabe fehlerhaft\")",
" ",
"# als json",
"params",
"=",
"{",
"\"art\"",
":",
"\"tags\"",
",",
"\"format\"",
":",
"\"html\"",
"}",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"url",
",",
"query_string",
"=",
"params",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Rückgabe fehlerhaft\")",
" "
] | [
395,
4
] | [
428,
79
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
get_resource_path | (path: str) | return path_from_root(os.path.join(RESOURCES_LOCATION, path)) | Erstellt einen absoluten Pfad zu der übergebenen Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
| Erstellt einen absoluten Pfad zu der übergebenen Ressource. | def get_resource_path(path: str):
"""Erstellt einen absoluten Pfad zu der übergebenen Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
"""
return path_from_root(os.path.join(RESOURCES_LOCATION, path)) | [
"def",
"get_resource_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"path_from_root",
"(",
"os",
".",
"path",
".",
"join",
"(",
"RESOURCES_LOCATION",
",",
"path",
")",
")"
] | [
76,
0
] | [
84,
65
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
GUI.aktualisieren_zeit | (self) | Sekündliches aktualisieren von Datum und Zeit | Sekündliches aktualisieren von Datum und Zeit | def aktualisieren_zeit(self):
""" Sekündliches aktualisieren von Datum und Zeit"""
zeit = time.strftime("%H:%M:%S")
self.zeit.set(zeit)
datum = time.strftime("%d.%m.%Y")
self.datum.set(datum) | [
"def",
"aktualisieren_zeit",
"(",
"self",
")",
":",
"zeit",
"=",
"time",
".",
"strftime",
"(",
"\"%H:%M:%S\"",
")",
"self",
".",
"zeit",
".",
"set",
"(",
"zeit",
")",
"datum",
"=",
"time",
".",
"strftime",
"(",
"\"%d.%m.%Y\"",
")",
"self",
".",
"datum",
".",
"set",
"(",
"datum",
")"
] | [
160,
4
] | [
165,
29
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
calculate_divide | (values: dict, data: StepData) | Dividiert gegebene Werte durch Werte, die in divide_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Dividiert gegebene Werte durch Werte, die in divide_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird. | def calculate_divide(values: dict, data: StepData):
"""Dividiert gegebene Werte durch Werte, die in divide_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
_bi_calculate(values, data, operator.truediv) | [
"def",
"calculate_divide",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"_bi_calculate",
"(",
"values",
",",
"data",
",",
"operator",
".",
"truediv",
")"
] | [
228,
0
] | [
235,
49
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
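calculate_subtract and calculate_divide both hand an operator from the operator module to a shared helper; the sketch below shows only that dispatch idea on plain lists. bi_calculate is an illustrative name, not the real _bi_calculate, whose key handling is not shown in these records:

import operator

def bi_calculate(left, right, op):
    # op is any binary callable, e.g. operator.sub or operator.truediv.
    return [op(a, b) for a, b in zip(left, right)]

print(bi_calculate([10, 20, 30], [1, 2, 3], operator.sub))      # [9, 18, 27]
print(bi_calculate([10, 20, 30], [1, 2, 3], operator.truediv))  # [10.0, 10.0, 10.0]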
|
date_weekday | (values: dict, data: StepData) | Wandelt das angegebene Datum in den jeweiligen Wochentag um.
Wandelt das angegebene Datum, im unter `"given_format"` angegebenen Format, in den jeweiligen Wochentag um.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Wandelt das angegebene Datum in den jeweiligen Wochentag um. | def date_weekday(values: dict, data: StepData):
"""Wandelt das angegebene Datum in den jeweiligen Wochentag um.
Wandelt das angegebene Datum, im unter `"given_format"` angegebenen Format, in den jeweiligen Wochentag um.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
day_weekday = {
0: "Montag",
1: "Dienstag",
2: "Mittwoch",
3: "Donnerstag",
4: "Freitag",
5: "Samstag",
6: "Sonntag"
}
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(values["keys"][idx], values)
given_format = data.format(values["given_format"], values)
date = datetime.strptime(value, given_format).date()
new_key = get_new_keys(values, idx)
new_value = day_weekday[date.weekday()]
data.insert_data(new_key, new_value, values) | [
"def",
"date_weekday",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"day_weekday",
"=",
"{",
"0",
":",
"\"Montag\"",
",",
"1",
":",
"\"Dienstag\"",
",",
"2",
":",
"\"Mittwoch\"",
",",
"3",
":",
"\"Donnerstag\"",
",",
"4",
":",
"\"Freitag\"",
",",
"5",
":",
"\"Samstag\"",
",",
"6",
":",
"\"Sonntag\"",
"}",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"values",
"[",
"\"keys\"",
"]",
"[",
"idx",
"]",
",",
"values",
")",
"given_format",
"=",
"data",
".",
"format",
"(",
"values",
"[",
"\"given_format\"",
"]",
",",
"values",
")",
"date",
"=",
"datetime",
".",
"strptime",
"(",
"value",
",",
"given_format",
")",
".",
"date",
"(",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"new_value",
"=",
"day_weekday",
"[",
"date",
".",
"weekday",
"(",
")",
"]",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
324,
0
] | [
348,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
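The core of date_weekday is datetime.strptime plus a lookup table from weekday() to German day names; a standalone illustration with an assumed input format:

from datetime import datetime

DAY_NAMES = {0: "Montag", 1: "Dienstag", 2: "Mittwoch", 3: "Donnerstag",
             4: "Freitag", 5: "Samstag", 6: "Sonntag"}

def weekday_name(value, given_format="%Y-%m-%d"):
    # Parse the date string with the configured format and map it to a name.
    return DAY_NAMES[datetime.strptime(value, given_format).date().weekday()]

print(weekday_name("2021-06-07"))  # Montag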
|
new_memory_resource_path | (job_name: str, name: str) | return get_memory_path(f"{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}.json", name, job_name) | Erstellt einen absoluten Pfad für eine neue Memory-Resource.
Generiert einen neuen Namen mit aktuellem Zeitstempel.
Verwendet um den Pfad zu generieren :func:`get_memory_path` mit dem Ordner des `job_name`.
:param job_name: Name des Jobs von der die Funktion aufgerufen wurde.
:param name: Name der Datei (ohne Datum)
| Erstellt einen absoluten Pfad für eine neue Memory-Resource. | def new_memory_resource_path(job_name: str, name: str):
"""Erstellt einen absoluten Pfad für eine neue Memory-Resource.
Generiert einen neuen Namen mit aktuellem Zeitstempel.
Verwendet um den Pfad zu generieren :func:`get_memory_path` mit dem Ordner des `job_name`.
:param job_name: Name des Jobs von der die Funktion aufgerufen wurde.
:param name: Name der Datei (ohne Datum)
"""
os.makedirs(get_memory_path("", name, job_name), exist_ok=True)
return get_memory_path(f"{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}.json", name, job_name) | [
"def",
"new_memory_resource_path",
"(",
"job_name",
":",
"str",
",",
"name",
":",
"str",
")",
":",
"os",
".",
"makedirs",
"(",
"get_memory_path",
"(",
"\"\"",
",",
"name",
",",
"job_name",
")",
",",
"exist_ok",
"=",
"True",
")",
"return",
"get_memory_path",
"(",
"f\"{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}.json\"",
",",
"name",
",",
"job_name",
")"
] | [
217,
0
] | [
227,
98
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
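The timestamped file name built by new_memory_resource_path comes straight from strftime; a self-contained sketch with an assumed directory layout (get_memory_path itself is not shown in this record):

import os
from datetime import datetime

def new_memory_file(base_dir, job_name, name):
    # Assumed layout: <base_dir>/<job_name>/<name>/<timestamp>.json
    folder = os.path.join(base_dir, job_name, name)
    os.makedirs(folder, exist_ok=True)
    stamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    return os.path.join(folder, f"{stamp}.json")

print(new_memory_file("/tmp/memory", "weather", "forecast"))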
HeistSystem.WriteMessage_WrongCommandOption | (self, data) | return | Schreibt eine Benachrichtigung in den Chat, dass der Spieler
das Kommando fehlerhaft aufgerufen hat.
| Schreibt eine Benachrichtigung in den Chat, dass der Spieler
das Kommando fehlerhaft aufgerufen hat.
| def WriteMessage_WrongCommandOption(self, data):
''' Schreibt eine Benachrichtigung in den Chat, dass der Spieler
das Kommando fehlerhaft aufgerufen hat.
'''
        thisActionName = "WriteMessage_WrongCommandOption"
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_WrongCommandOption
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
user=data.UserName,
command=self.Settings.Game_Command,
maxpoints=TransformLocale_Decimals(
int(self.Settings.Game_Settings_MaxStake)
),
pointsname=self.Parent.GetCurrencyName()
)
)
return | [
"def",
"WriteMessage_WrongCommandOption",
"(",
"self",
",",
"data",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_StakeOverMaximum\"",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_WrongCommandOption",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"user",
"=",
"data",
".",
"UserName",
",",
"command",
"=",
"self",
".",
"Settings",
".",
"Game_Command",
",",
"maxpoints",
"=",
"TransformLocale_Decimals",
"(",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Settings_MaxStake",
")",
")",
",",
"pointsname",
"=",
"self",
".",
"Parent",
".",
"GetCurrencyName",
"(",
")",
")",
")",
"return"
] | [
1304,
4
] | [
1327,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
PdfGenerator.set_page_defaults | (self) | Setzt die default Einstellungen für die Seiten.
* ersetzt variables in resources, filename, path
* Berechnet page Angaben header_height und footer_height
* füllt OVERLAY_STYLE, PAGE_STYLE, header_html und footer_html
| Setzt die default Einstellungen für die Seiten. | def set_page_defaults(self):
"""Setzt die default Einstellungen für die Seiten.
* ersetzt variables in resources, filename, path
* Berechnet page Angaben header_height und footer_height
* füllt OVERLAY_STYLE, PAGE_STYLE, header_html und footer_html
"""
# resources render_template für wichtige _variables durchführen
self._variables["resources"] = self._config.render_template( self._variables["resources"], self._variables)
self._variables["filename"] = self._config.render_template( self._variables["filename"], self._variables )
self._variables["path"] = self._config.render_template( self._variables["path"], self._variables )
# title Ersetzung durchführen
self.title = self._config.render_template( self.title, self._variables, deep_replace=True )
# ränder für die Seiteninhalte berechnen - PAGE_STYLE
page = self._variables["page"]
self._variables["page"]["header_height"] = page["top"] + page["header"] + page["header-margin"]
self._variables["page"]["footer_height"] = page["bottom"] + page["footer"] + page["footer-margin"]
# styles bereitstellen
self.PAGE_STYLE = self._config.get("templates.PDF-PAGE_STYLE", DEFAULT_TEMPLATES["PAGE_STYLE"])
self.OVERLAY_STYLE = self._config.get("templates.PDF-OVERLAY_STYLE", DEFAULT_TEMPLATES["OVERLAY_STYLE"])
# html Gerüst bereitstellen
self.header_html = self._config.get("templates.PDF-HEADER", DEFAULT_TEMPLATES["header_html"])
self.footer_html = self._config.get("templates.PDF-FOOTER", DEFAULT_TEMPLATES["footer_html"]) | [
"def",
"set_page_defaults",
"(",
"self",
")",
":",
"# resources render_template für wichtige _variables durchführen",
"self",
".",
"_variables",
"[",
"\"resources\"",
"]",
"=",
"self",
".",
"_config",
".",
"render_template",
"(",
"self",
".",
"_variables",
"[",
"\"resources\"",
"]",
",",
"self",
".",
"_variables",
")",
"self",
".",
"_variables",
"[",
"\"filename\"",
"]",
"=",
"self",
".",
"_config",
".",
"render_template",
"(",
"self",
".",
"_variables",
"[",
"\"filename\"",
"]",
",",
"self",
".",
"_variables",
")",
"self",
".",
"_variables",
"[",
"\"path\"",
"]",
"=",
"self",
".",
"_config",
".",
"render_template",
"(",
"self",
".",
"_variables",
"[",
"\"path\"",
"]",
",",
"self",
".",
"_variables",
")",
"# title Ersetzung durchführen",
"self",
".",
"title",
"=",
"self",
".",
"_config",
".",
"render_template",
"(",
"self",
".",
"title",
",",
"self",
".",
"_variables",
",",
"deep_replace",
"=",
"True",
")",
"# ränder für die Seiteninhalte berechnen - PAGE_STYLE",
"page",
"=",
"self",
".",
"_variables",
"[",
"\"page\"",
"]",
"self",
".",
"_variables",
"[",
"\"page\"",
"]",
"[",
"\"header_height\"",
"]",
"=",
"page",
"[",
"\"top\"",
"]",
"+",
"page",
"[",
"\"header\"",
"]",
"+",
"page",
"[",
"\"header-margin\"",
"]",
"self",
".",
"_variables",
"[",
"\"page\"",
"]",
"[",
"\"footer_height\"",
"]",
"=",
"page",
"[",
"\"bottom\"",
"]",
"+",
"page",
"[",
"\"footer\"",
"]",
"+",
"page",
"[",
"\"footer-margin\"",
"]",
"# styles bereitstellen",
"self",
".",
"PAGE_STYLE",
"=",
"self",
".",
"_config",
".",
"get",
"(",
"\"templates.PDF-PAGE_STYLE\"",
",",
"DEFAULT_TEMPLATES",
"[",
"\"PAGE_STYLE\"",
"]",
")",
"self",
".",
"OVERLAY_STYLE",
"=",
"self",
".",
"_config",
".",
"get",
"(",
"\"templates.PDF-OVERLAY_STYLE\"",
",",
"DEFAULT_TEMPLATES",
"[",
"\"OVERLAY_STYLE\"",
"]",
")",
"# html Gerüst bereitstellen",
"self",
".",
"header_html",
"=",
"self",
".",
"_config",
".",
"get",
"(",
"\"templates.PDF-HEADER\"",
",",
"DEFAULT_TEMPLATES",
"[",
"\"header_html\"",
"]",
")",
"self",
".",
"footer_html",
"=",
"self",
".",
"_config",
".",
"get",
"(",
"\"templates.PDF-FOOTER\"",
",",
"DEFAULT_TEMPLATES",
"[",
"\"footer_html\"",
"]",
")"
] | [
303,
4
] | [
331,
101
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
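The page-geometry part of set_page_defaults is plain margin arithmetic; assuming a page dict shaped like the one used above:

# Sample values only; the real numbers come from the instance's _variables.
page = {"top": 20, "bottom": 15, "header": 12, "footer": 8,
        "header-margin": 4, "footer-margin": 3}

# Space reserved above and below the content area (same formula as above).
page["header_height"] = page["top"] + page["header"] + page["header-margin"]
page["footer_height"] = page["bottom"] + page["footer"] + page["footer-margin"]

print(page["header_height"], page["footer_height"])  # 36 26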
|
 plot_entlastungswerte | (res) |  | plot entlastungsrate und -häufigkeit
entlastungsrate between 10-40
entlastungshäufigkeit between 20-50
     | plot entlastungsrate und -häufigkeit
entlastungsrate between 10-40
entlastungshäufigkeit between 20-50
| def plot_entlastungswerte(res):
"""plot entlastunggsrate und -häufigkeit
entlastungsrate between 10-40
entlastungshäufigkeit between 20-50
"""
mwb = res['mischwasserbauwerke']
# mwb = mwb.set_index('BEZEICHNUNG')
# make 2 subplots with horizontal alignment
fig, axes = plt.subplots(nrows=1, ncols=2, sharey="all")
fig.suptitle('Entlastungsrate und -häufigkeit')
# plot entlastungsrate
colors1 = get_color_by_thresholds(mwb["E0"], 10, 40)
mwb.plot.barh(ax=axes[0], y='E0', color=colors1, legend=None)
axes[0].set(ylabel="Bauwerk",
xlabel='Entlastungsrate [%]')
axes[0].axvline(x=10, linestyle='--', linewidth=0.5)
axes[0].axvline(x=40, linestyle='--', linewidth=0.5)
    # plot entlastungshäufigkeit
colors2 = get_color_by_thresholds(mwb["NUED"], 20, 50)
mwb.plot.barh(ax=axes[1], y='NUED', color=colors2, legend=None)
axes[1].set(xlabel='Entlastungshäufigkeit [d/a]')
axes[1].axvline(x=20, linestyle='--', linewidth=0.5)
axes[1].axvline(x=50, linestyle='--', linewidth=0.5)
# move legende to bottom righthand corner
plt.show() | [
"def",
"plot_entlastungswerte",
"(",
"res",
")",
":",
"mwb",
"=",
"res",
"[",
"'mischwasserbauwerke'",
"]",
"# mwb = mwb.set_index('BEZEICHNUNG')",
"# make 2 subplots with horizontal alignment",
"fig",
",",
"axes",
"=",
"plt",
".",
"subplots",
"(",
"nrows",
"=",
"1",
",",
"ncols",
"=",
"2",
",",
"sharey",
"=",
"\"all\"",
")",
"fig",
".",
"suptitle",
"(",
"'Entlastungsrate und -häufigkeit')",
"",
"# plot entlastungsrate",
"colors1",
"=",
"get_color_by_thresholds",
"(",
"mwb",
"[",
"\"E0\"",
"]",
",",
"10",
",",
"40",
")",
"mwb",
".",
"plot",
".",
"barh",
"(",
"ax",
"=",
"axes",
"[",
"0",
"]",
",",
"y",
"=",
"'E0'",
",",
"color",
"=",
"colors1",
",",
"legend",
"=",
"None",
")",
"axes",
"[",
"0",
"]",
".",
"set",
"(",
"ylabel",
"=",
"\"Bauwerk\"",
",",
"xlabel",
"=",
"'Entlastungsrate [%]'",
")",
"axes",
"[",
"0",
"]",
".",
"axvline",
"(",
"x",
"=",
"10",
",",
"linestyle",
"=",
"'--'",
",",
"linewidth",
"=",
"0.5",
")",
"axes",
"[",
"0",
"]",
".",
"axvline",
"(",
"x",
"=",
"40",
",",
"linestyle",
"=",
"'--'",
",",
"linewidth",
"=",
"0.5",
")",
"# plot enlastunghäufigkeit",
"colors2",
"=",
"get_color_by_thresholds",
"(",
"mwb",
"[",
"\"NUED\"",
"]",
",",
"20",
",",
"50",
")",
"mwb",
".",
"plot",
".",
"barh",
"(",
"ax",
"=",
"axes",
"[",
"1",
"]",
",",
"y",
"=",
"'NUED'",
",",
"color",
"=",
"colors2",
",",
"legend",
"=",
"None",
")",
"axes",
"[",
"1",
"]",
".",
"set",
"(",
"xlabel",
"=",
"'Entlastungshäufigkeit [d/a]')",
"",
"axes",
"[",
"1",
"]",
".",
"axvline",
"(",
"x",
"=",
"20",
",",
"linestyle",
"=",
"'--'",
",",
"linewidth",
"=",
"0.5",
")",
"axes",
"[",
"1",
"]",
".",
"axvline",
"(",
"x",
"=",
"50",
",",
"linestyle",
"=",
"'--'",
",",
"linewidth",
"=",
"0.5",
")",
"# move legende to bottom righthand corner",
"plt",
".",
"show",
"(",
")"
] | [
213,
0
] | [
241,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
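A reduced sketch of the threshold-coloured horizontal bar plot from plot_entlastungswerte, with made-up values and an inline colour rule standing in for get_color_by_thresholds:

import matplotlib.pyplot as plt
import pandas as pd

mwb = pd.DataFrame({"E0": [8.0, 25.0, 45.0]}, index=["BW1", "BW2", "BW3"])

# Green inside the 10-40 band, red outside (stand-in for get_color_by_thresholds).
colors = ["green" if 10 <= v <= 40 else "red" for v in mwb["E0"]]

fig, ax = plt.subplots()
ax.barh(mwb.index, mwb["E0"], color=colors)
ax.set(xlabel="Entlastungsrate [%]", ylabel="Bauwerk")
ax.axvline(x=10, linestyle="--", linewidth=0.5)
ax.axvline(x=40, linestyle="--", linewidth=0.5)
plt.show()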
|
testBase.check_pdf_data | ( self, data, contents=-1, pages=-1, intern_check:bool=False ) | Prüft pdf data mit vorher gespeicherten data
Erzeugt in unittest dir auf dem Server ein dir 'check', um dort die Vergleichsdaten zu speichern
Parameters
----------
data : dict
- content: dict
page_names : dict
- overlays: dict
- pages: int
- pdf_filename: string
- pdf_filepath: string
- png_filename: string
- png_filepath: string
contents : int
Anzahl der Seiten im Content
pages : int
Anzahl der Seiten im PDF
intern_check:
Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False
Returns
-------
None.
| Prüft pdf data mit vorher gespeicherten data
Erzeugt in unittest dir auf dem Server ein dir 'check', um dort die Vergleichsdaten zu speichern
Parameters
----------
data : dict
- content: dict
page_names : dict
- overlays: dict
- pages: int
- pdf_filename: string
- pdf_filepath: string
- png_filename: string
- png_filepath: string
contents : int
Anzahl der Seiten im Content
pages : int
Anzahl der Seiten im PDF
intern_check:
Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False
Returns
-------
None.
| def check_pdf_data( self, data, contents=-1, pages=-1, intern_check:bool=False ):
''' Prüft pdf data mit vorher gespeicherten data
Erzeugt in unittest dir auf dem Server ein dir 'check', um dort die Vergleichsdaten zu speichern
Parameters
----------
data : dict
- content: dict
page_names : dict
- overlays: dict
- pages: int
- pdf_filename: string
- pdf_filepath: string
- png_filename: string
- png_filepath: string
contents : int
Anzahl der Seiten im Content
pages : int
Anzahl der Seiten im PDF
intern_check:
Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False
Returns
-------
None.
'''
self.assertIn("pdf_filepath", data,
"PDF data fehlerhaft filename fehlt"
)
self.assertIn("png_filepath", data,
"PNG data fehlerhaft filepath fehlt"
)
check = {}
#
# Vorbereitungen
#
if intern_check == True:
test_dir = osp.join( ABSPATH, "resources" )
else:
test_dir = os.path.dirname( data["pdf_filepath"] )
check_dir = osp.join( test_dir, "check" )
# create the folders if not already exists
if not os.path.exists( check_dir ):
try:
os.makedirs( check_dir )
except IOError as e:
print("Unable to create dir.", e)
# Dateiname für den Inhalt festlegen
json_test_name = osp.join( test_dir, data["pdf_filename"] ) + ".json"
json_check_name = osp.join( check_dir, data["pdf_filename"] ) + ".json"
pdf_check_name = osp.join( check_dir, data["pdf_filename"] )
png_check_name = osp.join( check_dir, data["png_filename"] )
png_new_name = data["png_filepath"]
# immer den content in unittest ablegen
with open(json_test_name, "w" ) as json_file:
json.dump( data["content"] , json_file, indent=2 )
# beim erstenmal content nach check kopieren
if not os.path.exists( json_check_name ):
try:
copyfile(json_test_name, json_check_name)
except IOError as e:
print("Unable to copy file.", e)
# beim erstenmal pdf nach check kopieren
if not os.path.exists( pdf_check_name ):
try:
copyfile(data["pdf_filepath"], pdf_check_name)
except IOError as e:
print("Unable to copy file.", e)
# beim erstenmal png nach check kopieren
if not os.path.exists( png_check_name ):
try:
copyfile(png_new_name, png_check_name)
except IOError as e:
print("Unable to copy file.", e)
#
# Überprüfungen
#
# passende check daten (json_check_name) laden
with open( json_check_name ) as json_file:
check = json.load( json_file )
page_names = data["content"].keys()
# Anzahl der Bereiche prüfen
if contents > -1:
self.assertEqual(
len( page_names ),
contents,
"Anzahl der content Bereiche in '{}' stimmt nicht.".format( data["pdf_filepath"] )
)
# Namen der Bereiche
self.assertEqual(
page_names,
check.keys(),
"Namen der Bereiche '{}' stimmt nicht.".format( data["pdf_filepath"] )
)
# Anzahl der Seiten prüfen
if pages > -1:
self.assertEqual(
data["pages"],
pages,
"Anzahl der Seiten in '{}' stimmt nicht.".format( data["pdf_filepath"] )
)
# einige content Inhalte prüfen
from bs4 import BeautifulSoup
for page_name, content in data["content"].items():
bs_data = BeautifulSoup( content, 'html.parser')
bs_check = BeautifulSoup( check[ page_name ], 'html.parser')
# die text Bereiche
data_text_list = bs_data.find_all('div', {"class": "text"} )
check_text_list = bs_check.find_all('div', {"class": "text"} )
self.assertEqual(
data_text_list,
check_text_list,
"PDF content .text in '{}' ist fehlerhaft".format( data["pdf_filepath"] )
)
# erzeugte png vergleichen und diff speichern
png_check = img_io.imread( png_check_name )
png_new = img_io.imread( png_new_name )
self.assertEqual(
png_check.shape,
png_new.shape,
"Die Bildgrößen in '{}' stimmen nicht.".format( data["pdf_filepath"] )
)
        # Bildvergleich erstellen und speichern
compare = compare_images(png_check, png_new, method='diff')
img_io.imsave( png_new_name + ".diff.png", compare )
# gesamt check der Bilder
def check_mse(imageA, imageB):
# the 'Mean Squared Error' between the two images is the
# sum of the squared difference between the two images;
# NOTE: the two images must have the same dimension
err = np.sum((imageA.astype("float") - imageB.astype("float")) ** 2)
err /= float(imageA.shape[0] * imageA.shape[1])
# return the MSE, the lower the error, the more "similar"
# the two images are
return err
# MeanCheck durchführen
try:
mse = check_mse( png_check, png_new )
except:
mse = -1
self.assertEqual( 0.0, mse,
"Der PNG Vergleichsbild MSE stimmt nicht. Diff image '{}' prüfen".format( data["png_filepath"] + ".diff.png" )
) | [
"def",
"check_pdf_data",
"(",
"self",
",",
"data",
",",
"contents",
"=",
"-",
"1",
",",
"pages",
"=",
"-",
"1",
",",
"intern_check",
":",
"bool",
"=",
"False",
")",
":",
"self",
".",
"assertIn",
"(",
"\"pdf_filepath\"",
",",
"data",
",",
"\"PDF data fehlerhaft filename fehlt\"",
")",
"self",
".",
"assertIn",
"(",
"\"png_filepath\"",
",",
"data",
",",
"\"PNG data fehlerhaft filepath fehlt\"",
")",
"check",
"=",
"{",
"}",
"#",
"# Vorbereitungen",
"#",
"if",
"intern_check",
"==",
"True",
":",
"test_dir",
"=",
"osp",
".",
"join",
"(",
"ABSPATH",
",",
"\"resources\"",
")",
"else",
":",
"test_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
")",
"check_dir",
"=",
"osp",
".",
"join",
"(",
"test_dir",
",",
"\"check\"",
")",
"# create the folders if not already exists",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"check_dir",
")",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"check_dir",
")",
"except",
"IOError",
"as",
"e",
":",
"print",
"(",
"\"Unable to create dir.\"",
",",
"e",
")",
"# Dateiname für den Inhalt festlegen",
"json_test_name",
"=",
"osp",
".",
"join",
"(",
"test_dir",
",",
"data",
"[",
"\"pdf_filename\"",
"]",
")",
"+",
"\".json\"",
"json_check_name",
"=",
"osp",
".",
"join",
"(",
"check_dir",
",",
"data",
"[",
"\"pdf_filename\"",
"]",
")",
"+",
"\".json\"",
"pdf_check_name",
"=",
"osp",
".",
"join",
"(",
"check_dir",
",",
"data",
"[",
"\"pdf_filename\"",
"]",
")",
"png_check_name",
"=",
"osp",
".",
"join",
"(",
"check_dir",
",",
"data",
"[",
"\"png_filename\"",
"]",
")",
"png_new_name",
"=",
"data",
"[",
"\"png_filepath\"",
"]",
"# immer den content in unittest ablegen",
"with",
"open",
"(",
"json_test_name",
",",
"\"w\"",
")",
"as",
"json_file",
":",
"json",
".",
"dump",
"(",
"data",
"[",
"\"content\"",
"]",
",",
"json_file",
",",
"indent",
"=",
"2",
")",
"# beim erstenmal content nach check kopieren",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"json_check_name",
")",
":",
"try",
":",
"copyfile",
"(",
"json_test_name",
",",
"json_check_name",
")",
"except",
"IOError",
"as",
"e",
":",
"print",
"(",
"\"Unable to copy file.\"",
",",
"e",
")",
"# beim erstenmal pdf nach check kopieren",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"pdf_check_name",
")",
":",
"try",
":",
"copyfile",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
",",
"pdf_check_name",
")",
"except",
"IOError",
"as",
"e",
":",
"print",
"(",
"\"Unable to copy file.\"",
",",
"e",
")",
"# beim erstenmal png nach check kopieren",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"png_check_name",
")",
":",
"try",
":",
"copyfile",
"(",
"png_new_name",
",",
"png_check_name",
")",
"except",
"IOError",
"as",
"e",
":",
"print",
"(",
"\"Unable to copy file.\"",
",",
"e",
")",
"# ",
"# Überprüfungen",
"#",
"# passende check daten (json_check_name) laden",
"with",
"open",
"(",
"json_check_name",
")",
"as",
"json_file",
":",
"check",
"=",
"json",
".",
"load",
"(",
"json_file",
")",
"page_names",
"=",
"data",
"[",
"\"content\"",
"]",
".",
"keys",
"(",
")",
"# Anzahl der Bereiche prüfen",
"if",
"contents",
">",
"-",
"1",
":",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"page_names",
")",
",",
"contents",
",",
"\"Anzahl der content Bereiche in '{}' stimmt nicht.\"",
".",
"format",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
")",
")",
"# Namen der Bereiche",
"self",
".",
"assertEqual",
"(",
"page_names",
",",
"check",
".",
"keys",
"(",
")",
",",
"\"Namen der Bereiche '{}' stimmt nicht.\"",
".",
"format",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
")",
")",
"# Anzahl der Seiten prüfen",
"if",
"pages",
">",
"-",
"1",
":",
"self",
".",
"assertEqual",
"(",
"data",
"[",
"\"pages\"",
"]",
",",
"pages",
",",
"\"Anzahl der Seiten in '{}' stimmt nicht.\"",
".",
"format",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
")",
")",
"# einige content Inhalte prüfen ",
"from",
"bs4",
"import",
"BeautifulSoup",
"for",
"page_name",
",",
"content",
"in",
"data",
"[",
"\"content\"",
"]",
".",
"items",
"(",
")",
":",
"bs_data",
"=",
"BeautifulSoup",
"(",
"content",
",",
"'html.parser'",
")",
"bs_check",
"=",
"BeautifulSoup",
"(",
"check",
"[",
"page_name",
"]",
",",
"'html.parser'",
")",
"# die text Bereiche",
"data_text_list",
"=",
"bs_data",
".",
"find_all",
"(",
"'div'",
",",
"{",
"\"class\"",
":",
"\"text\"",
"}",
")",
"check_text_list",
"=",
"bs_check",
".",
"find_all",
"(",
"'div'",
",",
"{",
"\"class\"",
":",
"\"text\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"data_text_list",
",",
"check_text_list",
",",
"\"PDF content .text in '{}' ist fehlerhaft\"",
".",
"format",
"(",
"data",
"[",
"\"pdf_filepath\"",
"]",
")",
")",
"# erzeugte png vergleichen und diff speichern ",
"png_check",
"=",
"img_io",
".",
"imread",
"(",
"png_check_name",
")",
"png_new",
"=",
"img_io",
".",
"imread",
"(",
"png_new_name",
")",
"self",
".",
"assertEqual",
"(",
"png_check",
".",
"shape",
",",
"png_new",
".",
"shape",
",",
"\"Die Bildgrößen in '{}' stimmen nicht.\".f",
"o",
"rmat( ",
"d",
"ta[\"",
"p",
"df_filepath\"] ",
")",
"",
")",
"# Bild verleich erstellen und speichern",
"compare",
"=",
"compare_images",
"(",
"png_check",
",",
"png_new",
",",
"method",
"=",
"'diff'",
")",
"img_io",
".",
"imsave",
"(",
"png_new_name",
"+",
"\".diff.png\"",
",",
"compare",
")",
"# gesamt check der Bilder",
"def",
"check_mse",
"(",
"imageA",
",",
"imageB",
")",
":",
"# the 'Mean Squared Error' between the two images is the",
"# sum of the squared difference between the two images;",
"# NOTE: the two images must have the same dimension",
"err",
"=",
"np",
".",
"sum",
"(",
"(",
"imageA",
".",
"astype",
"(",
"\"float\"",
")",
"-",
"imageB",
".",
"astype",
"(",
"\"float\"",
")",
")",
"**",
"2",
")",
"err",
"/=",
"float",
"(",
"imageA",
".",
"shape",
"[",
"0",
"]",
"*",
"imageA",
".",
"shape",
"[",
"1",
"]",
")",
"# return the MSE, the lower the error, the more \"similar\"",
"# the two images are",
"return",
"err",
"# MeanCheck durchführen ",
"try",
":",
"mse",
"=",
"check_mse",
"(",
"png_check",
",",
"png_new",
")",
"except",
":",
"mse",
"=",
"-",
"1",
"self",
".",
"assertEqual",
"(",
"0.0",
",",
"mse",
",",
"\"Der PNG Vergleichsbild MSE stimmt nicht. Diff image '{}' prüfen\".",
"f",
"ormat(",
" ",
"ata[",
"\"",
"png_filepath\"]",
" ",
" ",
".diff.png\" ",
"",
")"
] | [
212,
4
] | [
382,
9
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
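The final pixel check in check_pdf_data is a mean-squared-error comparison of two equally sized images; the same check in isolation, using only NumPy:

import numpy as np

def mse(image_a, image_b):
    # Mean squared error; 0.0 means the two images are pixel-identical.
    err = np.sum((image_a.astype("float") - image_b.astype("float")) ** 2)
    return err / float(image_a.shape[0] * image_a.shape[1])

reference = np.zeros((4, 4), dtype=np.uint8)
candidate = reference.copy()
candidate[0, 0] = 255  # one differing pixel

print(mse(reference, reference))  # 0.0
print(mse(reference, candidate))  # 4064.0625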
|
HeistSystem.game_Timer | (self) | return | Timer für das Spiel | Timer für das Spiel | def game_Timer(self):
''' Timer für das Spiel '''
thisActionName = "game_Timer"
# Es läuft kein Heist und auch kein Cooldown - Abbruch
if (self.GameStartTimestamp == "") and (self.LastGameFinishTimestamp == ""):
return
# Es läuft ein Spiel - Timer prüfen
if not (self.GameStartTimestamp == ""):
# aktuelle Laufzeit berechnen
elapsedTime = (time.time() - self.GameStartTimestamp)
# Zeit zum Beenden des Spieles erreicht
if (elapsedTime >= int(self.Settings.Game_UntilStart_Time)):
self.Logger.WriteLog(
" {0} - Spiel wird beendet".format(thisActionName))
# Spiel beenden
self.game_EndHeist()
# Cooldown ist aktiv
if not (self.LastGameFinishTimestamp == ""):
# aktuelle Laufzeit berechnen
elapsedTime = (time.time() - self.LastGameFinishTimestamp)
# Heist ist nicht mehr im Cooldown
if (elapsedTime >= int(self.Settings.Game_Cooldown_Time)):
self.Logger.WriteLog(
" {0} - Cooldown-Zeit abgelaufen".format(thisActionName))
self.WriteMessage_GameCooldownOver()
# Zeitstempel zurücksetzen
self.LastGameFinishTimestamp = ""
return | [
"def",
"game_Timer",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"game_Timer\"",
"# Es läuft kein Heist und auch kein Cooldown - Abbruch\r",
"if",
"(",
"self",
".",
"GameStartTimestamp",
"==",
"\"\"",
")",
"and",
"(",
"self",
".",
"LastGameFinishTimestamp",
"==",
"\"\"",
")",
":",
"return",
"# Es läuft ein Spiel - Timer prüfen\r",
"if",
"not",
"(",
"self",
".",
"GameStartTimestamp",
"==",
"\"\"",
")",
":",
"# aktuelle Laufzeit berechnen\r",
"elapsedTime",
"=",
"(",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"GameStartTimestamp",
")",
"# Zeit zum Beenden des Spieles erreicht\r",
"if",
"(",
"elapsedTime",
">=",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_UntilStart_Time",
")",
")",
":",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" {0} - Spiel wird beendet\"",
".",
"format",
"(",
"thisActionName",
")",
")",
"# Spiel beenden\r",
"self",
".",
"game_EndHeist",
"(",
")",
"# Cooldown ist aktiv\r",
"if",
"not",
"(",
"self",
".",
"LastGameFinishTimestamp",
"==",
"\"\"",
")",
":",
"# aktuelle Laufzeit berechnen\r",
"elapsedTime",
"=",
"(",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"LastGameFinishTimestamp",
")",
"# Heist ist nicht mehr im Cooldown\r",
"if",
"(",
"elapsedTime",
">=",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Cooldown_Time",
")",
")",
":",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" {0} - Cooldown-Zeit abgelaufen\"",
".",
"format",
"(",
"thisActionName",
")",
")",
"self",
".",
"WriteMessage_GameCooldownOver",
"(",
")",
"# Zeitstempel zurücksetzen\r",
"self",
".",
"LastGameFinishTimestamp",
"=",
"\"\"",
"return"
] | [
1686,
4
] | [
1727,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
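game_Timer boils down to elapsed-time comparisons against two timestamps (game running, cooldown running); a stripped-down sketch without the chat-bot framework (class and attribute names here are illustrative):

import time

class TimerSketch:
    def __init__(self, run_seconds=60, cooldown_seconds=300):
        self.run_seconds = run_seconds
        self.cooldown_seconds = cooldown_seconds
        self.start_ts = ""    # "" means: no game running
        self.finish_ts = ""   # "" means: no cooldown running

    def tick(self):
        # Nothing running at all: bail out early, as in game_Timer.
        if self.start_ts == "" and self.finish_ts == "":
            return
        # Game has run long enough: end it and start the cooldown.
        if self.start_ts != "" and time.time() - self.start_ts >= self.run_seconds:
            self.start_ts = ""
            self.finish_ts = time.time()
        # Cooldown elapsed: clear the timestamp again.
        if self.finish_ts != "" and time.time() - self.finish_ts >= self.cooldown_seconds:
            self.finish_ts = ""

t = TimerSketch(run_seconds=1, cooldown_seconds=2)
t.start_ts = time.time()
t.tick()  # call periodically, e.g. from the bot's regular tick hook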
Graph.add_mid_point | (self, m1, m2) | m1 und m2 sind die Mittelpunkte, zwischen denen der Mittelpunkt gelegt werden soll | m1 und m2 sind die Mittelpunkte, zwischen denen der Mittelpunkt gelegt werden soll | def add_mid_point(self, m1, m2):
"""m1 und m2 sind die Mittelpunkte, zwischen denen der Mittelpunkt gelegt werden soll"""
mid = m1 + m2
mid = mid / np.linalg.norm(mid)
if self.mid_points is None:
self.mid_points = np.array(mid)
else:
self.mid_points = np.vstack((self.mid_points, mid)) | [
"def",
"add_mid_point",
"(",
"self",
",",
"m1",
",",
"m2",
")",
":",
"mid",
"=",
"m1",
"+",
"m2",
"mid",
"=",
"mid",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"mid",
")",
"if",
"self",
".",
"mid_points",
"is",
"None",
":",
"self",
".",
"mid_points",
"=",
"np",
".",
"array",
"(",
"mid",
")",
"else",
":",
"self",
".",
"mid_points",
"=",
"np",
".",
"vstack",
"(",
"(",
"self",
".",
"mid_points",
",",
"mid",
")",
")"
] | [
52,
4
] | [
60,
63
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
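Graph.add_mid_point normalises the vector sum of two points before stacking it; the computation in isolation:

import numpy as np

def unit_midpoint(m1, m2):
    # Midpoint direction: normalised sum of the two vectors.
    mid = np.asarray(m1, dtype=float) + np.asarray(m2, dtype=float)
    return mid / np.linalg.norm(mid)

print(unit_midpoint([1.0, 0.0, 0.0], [0.0, 1.0, 0.0]))
# [0.70710678 0.70710678 0.        ]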
|
ispTest.check_weasyprint | ( self ) | Ein PDF Dokument mit weasyprint erstellen
| Ein PDF Dokument mit weasyprint erstellen
| def check_weasyprint( self ):
''' Ein PDF Dokument mit weasyprint erstellen
'''
# pdf weasyprint test
from weasyprint import HTML, CSS
from weasyprint.fonts import FontConfiguration
font_config = FontConfiguration()
from weasyprint import default_url_fetcher
files_loaded = []
def log_url_fetcher(url):
files_loaded.append( url )
return default_url_fetcher(url)
# HTML('<h1>foo') would be filename
base_dir = os.path.join( ABSPATH, "..", "resources" )
html = HTML(string='''
<h1>The title</h1>
<div class="blue-text">blauer Text</div>
<span>mdi-check-outline: </span><span><i class="mdi mdi-check-outline"></></span><span> Oder?</span>
''')
css = CSS(string='''
@import url(mpdf_styles.css);
h1 { font-family: Arial,"Helvetica Neue",Helvetica,sans-serif }
''', font_config=font_config, url_fetcher=log_url_fetcher, base_url=base_dir )
pdf_file_name = os.path.join( ABSPATH, 'files', 'weasyprint.pdf')
html.write_pdf( pdf_file_name, stylesheets=[css], font_config=font_config)
# es sollten min. 5 Dateien eingelesen werden
self.assertGreaterEqual(len(files_loaded), 5, "Anzahl nicht >= 5")
# only test 4
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-4"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
# kommt es hier zu einem Fehler stimmt die font Einbindung von weasyprint nicht
self.check_pdf_data( response.json["data"], contents=2, pages=3, intern_check=True ) | [
"def",
"check_weasyprint",
"(",
"self",
")",
":",
"# pdf weasyprint test ",
"from",
"weasyprint",
"import",
"HTML",
",",
"CSS",
"from",
"weasyprint",
".",
"fonts",
"import",
"FontConfiguration",
"font_config",
"=",
"FontConfiguration",
"(",
")",
"from",
"weasyprint",
"import",
"default_url_fetcher",
"files_loaded",
"=",
"[",
"]",
"def",
"log_url_fetcher",
"(",
"url",
")",
":",
"files_loaded",
".",
"append",
"(",
"url",
")",
"return",
"default_url_fetcher",
"(",
"url",
")",
"# HTML('<h1>foo') would be filename",
"base_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"ABSPATH",
",",
"\"..\"",
",",
"\"resources\"",
")",
"html",
"=",
"HTML",
"(",
"string",
"=",
"'''\n <h1>The title</h1>\n <div class=\"blue-text\">blauer Text</div>\n <span>mdi-check-outline: </span><span><i class=\"mdi mdi-check-outline\"></></span><span> Oder?</span>\n '''",
")",
"css",
"=",
"CSS",
"(",
"string",
"=",
"'''\n @import url(mpdf_styles.css);\n h1 { font-family: Arial,\"Helvetica Neue\",Helvetica,sans-serif }\n '''",
",",
"font_config",
"=",
"font_config",
",",
"url_fetcher",
"=",
"log_url_fetcher",
",",
"base_url",
"=",
"base_dir",
")",
"pdf_file_name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"ABSPATH",
",",
"'files'",
",",
"'weasyprint.pdf'",
")",
"html",
".",
"write_pdf",
"(",
"pdf_file_name",
",",
"stylesheets",
"=",
"[",
"css",
"]",
",",
"font_config",
"=",
"font_config",
")",
"# es sollten min. 5 Dateien eingelesen werden",
"self",
".",
"assertGreaterEqual",
"(",
"len",
"(",
"files_loaded",
")",
",",
"5",
",",
"\"Anzahl nicht >= 5\"",
")",
"# only test 4",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-4\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"# kommt es hier zu einem Fehler stimmt die font Einbindung von weasyprint nicht",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"2",
",",
"pages",
"=",
"3",
",",
"intern_check",
"=",
"True",
")"
] | [
2509,
4
] | [
2557,
92
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
OpenDatabaseDir | () | return | Oeffnet das Datenbank-Verzeichnis | Oeffnet das Datenbank-Verzeichnis | def OpenDatabaseDir():
''' Oeffnet das Datenbank-Verzeichnis'''
global myDatabaseDir
    # Datenbank-Verzeichnis öffnen
os.startfile(myDatabaseDir)
return | [
"def",
"OpenDatabaseDir",
"(",
")",
":",
"global",
"myDatabaseDir",
"# Logfile-Verzeichnis öffnen\r",
"os",
".",
"startfile",
"(",
"myDatabaseDir",
")",
"return"
] | [
210,
0
] | [
217,
10
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.DB_get_TargetNames | (self) | return resultList | Auslesen der möglichen Ziele aus der Datenbank | Auslesen der möglichen Ziele aus der Datenbank | def DB_get_TargetNames(self):
''' Auslesen der möglichen Ziele aus der Datenbank '''
thisActionName = "DB_get_TargetNames"
resultList = []
# SQL-Abfrage vorbereiten
sql = "SELECT targetName FROM game_heist_targets"
try:
# SQL-Abfrage ausführen
rows = self.GameDB.execute(sql).fetchall()
except Exception as e:
# Fehler in Log-Datei schreiben
self.Logger.WriteLog(
" --- FEHLER - {0} ---".format(thisActionName))
self.Logger.WriteLog(
" --- EXCEPTION: {0}".format(str(sys.exc_info())))
return resultList
# Abfrage lieferte Daten
if rows:
# Für jeden Datensatz ausführen
for row in rows:
# Übergebe Daten an Liste
resultList.append(row[0])
return resultList | [
"def",
"DB_get_TargetNames",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"DB_get_TargetNames\"",
"resultList",
"=",
"[",
"]",
"# SQL-Abfrage vorbereiten\r",
"sql",
"=",
"\"SELECT targetName FROM game_heist_targets\"",
"try",
":",
"# SQL-Abfrage ausführen\r",
"rows",
"=",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql",
")",
".",
"fetchall",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"# Fehler in Log-Datei schreiben\r",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" --- FEHLER - {0} ---\"",
".",
"format",
"(",
"thisActionName",
")",
")",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" --- EXCEPTION: {0}\"",
".",
"format",
"(",
"str",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
")",
")",
"return",
"resultList",
"# Abfrage lieferte Daten\r",
"if",
"rows",
":",
"# Für jeden Datensatz ausführen\r",
"for",
"row",
"in",
"rows",
":",
"# Übergebe Daten an Liste\r",
"resultList",
".",
"append",
"(",
"row",
"[",
"0",
"]",
")",
"return",
"resultList"
] | [
965,
4
] | [
996,
25
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
get_job_run_info | (job_id) | Gibt den Namen eines Jobs, dessen Parameter sowie den Namen der zugehörigen JSON-Datei zurück.
:param job_id: id des Jobs
| Gibt den Namen eines Jobs, dessen Parameter sowie den Namen der zugehörigen JSON-Datei zurück. | def get_job_run_info(job_id):
"""Gibt den Namen eines Jobs, dessen Parameter sowie den Namen der zugehörigen JSON-Datei zurück.
:param job_id: id des Jobs
"""
with db.open_con() as con:
res = con.execute("""
SELECT job_name, json_file_name, key, value, job_config.type as type, position, delete_options.type as d_type, k_count, fix_names_count
FROM job
INNER JOIN delete_options USING(delete_options_id)
INNER JOIN job_topic_position USING(job_id)
LEFT JOIN job_config USING(position_id)
INNER JOIN steps USING(steps_id)
WHERE job_id=?
ORDER BY(position)
""", [job_id]).fetchall()
job_name = res[0]["job_name"]
steps_name = res[0]["json_file_name"]
config = {}
# Init Config with deletion settings
if res[0]["d_type"] == "keep_count":
config["keep_count"] = res[0]["k_count"]
if res[0]["d_type"] == "fix_names":
config["fix_names"] = {"count": res[0]["fix_names_count"]}
# Handle Multiple Topics
if len(res) > 0:
topic_count = int(res[len(res) - 1]["position"] + 1)
attach = [{"config": {}, "steps": ""}] * (topic_count - 1)
for row in res:
key = row["key"]
type = row["type"]
value = queries.to_typed_value(row["value"], type)
sub_steps_name = row["json_file_name"]
position = int(row["position"]) - 1
if position < 0:
if key is not None:
config = {**config, key: value}
else:
attach_config = {**attach[position]["config"]}
if key is not None:
attach_config = {**attach_config, key: value}
attach[position] = {**attach[position], "config": attach_config, "steps": sub_steps_name}
if len(attach) > 0:
config = {**config, "attach": attach}
return job_name, steps_name, config | [
"def",
"get_job_run_info",
"(",
"job_id",
")",
":",
"with",
"db",
".",
"open_con",
"(",
")",
"as",
"con",
":",
"res",
"=",
"con",
".",
"execute",
"(",
"\"\"\"\n SELECT job_name, json_file_name, key, value, job_config.type as type, position, delete_options.type as d_type, k_count, fix_names_count\n FROM job \n INNER JOIN delete_options USING(delete_options_id)\n INNER JOIN job_topic_position USING(job_id)\n LEFT JOIN job_config USING(position_id) \n INNER JOIN steps USING(steps_id)\n WHERE job_id=?\n ORDER BY(position)\n \"\"\"",
",",
"[",
"job_id",
"]",
")",
".",
"fetchall",
"(",
")",
"job_name",
"=",
"res",
"[",
"0",
"]",
"[",
"\"job_name\"",
"]",
"steps_name",
"=",
"res",
"[",
"0",
"]",
"[",
"\"json_file_name\"",
"]",
"config",
"=",
"{",
"}",
"# Init Config with deletion settings",
"if",
"res",
"[",
"0",
"]",
"[",
"\"d_type\"",
"]",
"==",
"\"keep_count\"",
":",
"config",
"[",
"\"keep_count\"",
"]",
"=",
"res",
"[",
"0",
"]",
"[",
"\"k_count\"",
"]",
"if",
"res",
"[",
"0",
"]",
"[",
"\"d_type\"",
"]",
"==",
"\"fix_names\"",
":",
"config",
"[",
"\"fix_names\"",
"]",
"=",
"{",
"\"count\"",
":",
"res",
"[",
"0",
"]",
"[",
"\"fix_names_count\"",
"]",
"}",
"# Handle Multiple Topics",
"if",
"len",
"(",
"res",
")",
">",
"0",
":",
"topic_count",
"=",
"int",
"(",
"res",
"[",
"len",
"(",
"res",
")",
"-",
"1",
"]",
"[",
"\"position\"",
"]",
"+",
"1",
")",
"attach",
"=",
"[",
"{",
"\"config\"",
":",
"{",
"}",
",",
"\"steps\"",
":",
"\"\"",
"}",
"]",
"*",
"(",
"topic_count",
"-",
"1",
")",
"for",
"row",
"in",
"res",
":",
"key",
"=",
"row",
"[",
"\"key\"",
"]",
"type",
"=",
"row",
"[",
"\"type\"",
"]",
"value",
"=",
"queries",
".",
"to_typed_value",
"(",
"row",
"[",
"\"value\"",
"]",
",",
"type",
")",
"sub_steps_name",
"=",
"row",
"[",
"\"json_file_name\"",
"]",
"position",
"=",
"int",
"(",
"row",
"[",
"\"position\"",
"]",
")",
"-",
"1",
"if",
"position",
"<",
"0",
":",
"if",
"key",
"is",
"not",
"None",
":",
"config",
"=",
"{",
"*",
"*",
"config",
",",
"key",
":",
"value",
"}",
"else",
":",
"attach_config",
"=",
"{",
"*",
"*",
"attach",
"[",
"position",
"]",
"[",
"\"config\"",
"]",
"}",
"if",
"key",
"is",
"not",
"None",
":",
"attach_config",
"=",
"{",
"*",
"*",
"attach_config",
",",
"key",
":",
"value",
"}",
"attach",
"[",
"position",
"]",
"=",
"{",
"*",
"*",
"attach",
"[",
"position",
"]",
",",
"\"config\"",
":",
"attach_config",
",",
"\"steps\"",
":",
"sub_steps_name",
"}",
"if",
"len",
"(",
"attach",
")",
">",
"0",
":",
"config",
"=",
"{",
"*",
"*",
"config",
",",
"\"attach\"",
":",
"attach",
"}",
"return",
"job_name",
",",
"steps_name",
",",
"config"
] | [
33,
0
] | [
83,
43
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
plotClass.initPlot | (self, imgSize=None, getPlot=True, **args ) | return self.fig, self.ax | Figure und axis initialisieren.
Parameters
----------
imgSize: dict
größe des Plotbereichs - default: ``{"width": 90, "height": 90 }``
getPlot: boolean
bei true plt.ioff aufrufen
args:
zusätzliche Angaben für plt.subplots
| Figure und axis initialisieren. | def initPlot(self, imgSize=None, getPlot=True, **args ):
"""Figure und axis initialisieren.
Parameters
----------
imgSize: dict
größe des Plotbereichs - default: ``{"width": 90, "height": 90 }``
getPlot: boolean
bei true plt.ioff aufrufen
args:
zusätzliche Angaben für plt.subplots
"""
# defaults für plt setzen
plt.rcParams.update( rcParams )
# soll der plot zurückgegeben werden ioff setzen
if getPlot:
plt.ioff()
# figsize immer angeben
if not "figsize" in args:
# scalierung für die figure größe
figscale = 0.4 # 0.2
if not imgSize:
imgSize = {"width": 90, "height": 90 }
args["figsize"] = ( imgSize["width"] / mm * figscale, imgSize["height"] / mm * figscale )
# plotbereiche erstellen
self.fig, self.ax = plt.subplots( **args )
return self.fig, self.ax | [
"def",
"initPlot",
"(",
"self",
",",
"imgSize",
"=",
"None",
",",
"getPlot",
"=",
"True",
",",
"*",
"*",
"args",
")",
":",
"# defaults für plt setzen",
"plt",
".",
"rcParams",
".",
"update",
"(",
"rcParams",
")",
"# soll der plot zurückgegeben werden ioff setzen",
"if",
"getPlot",
":",
"plt",
".",
"ioff",
"(",
")",
"# figsize immer angeben",
"if",
"not",
"\"figsize\"",
"in",
"args",
":",
"# scalierung für die figure größe",
"figscale",
"=",
"0.4",
"# 0.2",
"if",
"not",
"imgSize",
":",
"imgSize",
"=",
"{",
"\"width\"",
":",
"90",
",",
"\"height\"",
":",
"90",
"}",
"args",
"[",
"\"figsize\"",
"]",
"=",
"(",
"imgSize",
"[",
"\"width\"",
"]",
"/",
"mm",
"*",
"figscale",
",",
"imgSize",
"[",
"\"height\"",
"]",
"/",
"mm",
"*",
"figscale",
")",
"# plotbereiche erstellen",
"self",
".",
"fig",
",",
"self",
".",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"*",
"*",
"args",
")",
"return",
"self",
".",
"fig",
",",
"self",
".",
"ax"
] | [
65,
4
] | [
99,
32
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
register_image | (func) | return register_type_func(IMAGE_TYPES, ImageError, func) | Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary IMAGE_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
| Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary IMAGE_TYPES hinzu. | def register_image(func):
"""Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary IMAGE_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
"""
return register_type_func(IMAGE_TYPES, ImageError, func) | [
"def",
"register_image",
"(",
"func",
")",
":",
"return",
"register_type_func",
"(",
"IMAGE_TYPES",
",",
"ImageError",
",",
"func",
")"
] | [
21,
0
] | [
28,
60
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
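register_image follows a registry-decorator pattern: wrap the function in error handling and store it in a type dictionary. The sketch below is a guess at the general shape of that pattern; register_type_func itself is not shown in this record, so names and behaviour here are assumptions, not its actual implementation:

FUNC_REGISTRY = {}

class RegistryError(Exception):
    pass

def register_func(registry, error_cls, func):
    # Wrap func so any failure is re-raised as the registry's error type,
    # then record the wrapper under the function's own name.
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            raise error_cls(str(exc)) from exc
    registry[func.__name__] = wrapper
    return wrapper

def register_image_like(func):
    return register_func(FUNC_REGISTRY, RegistryError, func)

@register_image_like
def draw_circle(radius):
    return f"circle r={radius}"

print(FUNC_REGISTRY["draw_circle"](3))  # circle r=3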
PdfGenerator.pandas | (self, df=None, area:dict={}, attrs:dict={}, fields:list=[], render=None ) | return html | Ein Pandas Dataframe einfügen.
Parameters
----------
df: pandas.DataFrame
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
fields : list
Liste von dict mit Angaben zu den auszugebenden Feldern::
{"field": "", "label":"", "format":"", "style": [('text-align', 'left')] }
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
Returns
-------
html: str
HTML des erzeugten Elements
| Ein Pandas Dataframe einfügen. | def pandas(self, df=None, area:dict={}, attrs:dict={}, fields:list=[], render=None ):
"""Ein Pandas Dataframe einfügen.
Parameters
----------
df: pandas.DataFrame
area : Area {left,top,with,height}
die Größe der Ausgabe
attrs : dict
zu ändernde id class oder Style Angaben
fields : list
Liste von dict mit Angaben zu den auszugebenden Feldern::
{"field": "", "label":"", "format":"", "style": [('text-align', 'left')] }
render : bool
sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
Returns
-------
html: str
HTML des erzeugten Elements
"""
if not isinstance(df, pd.DataFrame) or df.empty:
return ""
# uuid festlegen
if "id" in attrs:
uuid = "{}_".format( attrs["id"] )
else:
self.pandas_table_id = self.pandas_table_id + 1
uuid = "{}_".format( self.pandas_table_id )
# Felder holen
pf = self._getPandasFields( df, fields )
if pf:
html = self.html( df[ pf["names"] ].rename(columns=pf["columns"]).style
.set_table_attributes('class="layout-fill-width"')
.format( pf["field_format"] )
.set_table_styles( pf["table_styles"] )
.hide_index()
.set_uuid( uuid )
.render().replace('nan','')
, area=area
, attrs=attrs
, render=render
)
else:
html = self.html( df.style
.set_table_attributes('class="layout-fill-width"')
.hide_index()
.set_uuid( uuid )
.render().replace('nan','')
, area=area
, attrs=attrs,
render=render
)
return html | [
"def",
"pandas",
"(",
"self",
",",
"df",
"=",
"None",
",",
"area",
":",
"dict",
"=",
"{",
"}",
",",
"attrs",
":",
"dict",
"=",
"{",
"}",
",",
"fields",
":",
"list",
"=",
"[",
"]",
",",
"render",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"df",
",",
"pd",
".",
"DataFrame",
")",
"or",
"df",
".",
"empty",
":",
"return",
"\"\"",
"# uuid festlegen",
"if",
"\"id\"",
"in",
"attrs",
":",
"uuid",
"=",
"\"{}_\"",
".",
"format",
"(",
"attrs",
"[",
"\"id\"",
"]",
")",
"else",
":",
"self",
".",
"pandas_table_id",
"=",
"self",
".",
"pandas_table_id",
"+",
"1",
"uuid",
"=",
"\"{}_\"",
".",
"format",
"(",
"self",
".",
"pandas_table_id",
")",
"# Felder holen",
"pf",
"=",
"self",
".",
"_getPandasFields",
"(",
"df",
",",
"fields",
")",
"if",
"pf",
":",
"html",
"=",
"self",
".",
"html",
"(",
"df",
"[",
"pf",
"[",
"\"names\"",
"]",
"]",
".",
"rename",
"(",
"columns",
"=",
"pf",
"[",
"\"columns\"",
"]",
")",
".",
"style",
".",
"set_table_attributes",
"(",
"'class=\"layout-fill-width\"'",
")",
".",
"format",
"(",
"pf",
"[",
"\"field_format\"",
"]",
")",
".",
"set_table_styles",
"(",
"pf",
"[",
"\"table_styles\"",
"]",
")",
".",
"hide_index",
"(",
")",
".",
"set_uuid",
"(",
"uuid",
")",
".",
"render",
"(",
")",
".",
"replace",
"(",
"'nan'",
",",
"''",
")",
",",
"area",
"=",
"area",
",",
"attrs",
"=",
"attrs",
",",
"render",
"=",
"render",
")",
"else",
":",
"html",
"=",
"self",
".",
"html",
"(",
"df",
".",
"style",
".",
"set_table_attributes",
"(",
"'class=\"layout-fill-width\"'",
")",
".",
"hide_index",
"(",
")",
".",
"set_uuid",
"(",
"uuid",
")",
".",
"render",
"(",
")",
".",
"replace",
"(",
"'nan'",
",",
"''",
")",
",",
"area",
"=",
"area",
",",
"attrs",
"=",
"attrs",
",",
"render",
"=",
"render",
")",
"return",
"html"
] | [
1200,
4
] | [
1260,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.RandomTarget_ByName | (self) | return | Ein zufälliges Ziel für den Heist auswählen | Ein zufälliges Ziel für den Heist auswählen | def RandomTarget_ByName(self):
''' Ein zufälliges Ziel für den Heist auswählen '''
thisActionName = "RandomTarget_ByName"
# Ziele aus der Datenbank auslesen
targetList = self.DB_get_TargetNames()
# Die Liste enthält Nachrichten-Texte
if targetList:
# Liste durchmischen
random.shuffle(targetList)
# Anzahl der Listenelemente bestimmen
listLength = int(len(targetList))
# Nachrichten-Text übernehmen
target = targetList[self.Parent.GetRandom(0, listLength)]
# Rückgabe der zufälligen Nachricht an aufrufende Funktion
return target
return | [
"def",
"RandomTarget_ByName",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"RandomTarget_ByName\"",
"# Ziele aus der Datenbank auslesen\r",
"targetList",
"=",
"self",
".",
"DB_get_TargetNames",
"(",
")",
"# Die Liste enthält Nachrichten-Texte\r",
"if",
"targetList",
":",
"# Liste durchmischen\r",
"random",
".",
"shuffle",
"(",
"targetList",
")",
"# Anzahl der Listenelemente bestimmen\r",
"listLength",
"=",
"int",
"(",
"len",
"(",
"targetList",
")",
")",
"# Nachrichten-Text übernehmen\r",
"target",
"=",
"targetList",
"[",
"self",
".",
"Parent",
".",
"GetRandom",
"(",
"0",
",",
"listLength",
")",
"]",
"# Rückgabe der zufälligen Nachricht an aufrufende Funktion\r",
"return",
"target",
"return"
] | [
1066,
4
] | [
1086,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Jahr.jahr | (self, jahr: int) | Das Jahr setzen | Das Jahr setzen | def jahr(self, jahr: int):
"""Das Jahr setzen"""
exception_geworfen = False
try:
# Das Argument konvertieren
self.__jahr: int = int(jahr)
except ValueError:
exception_geworfen = True
except TypeError:
exception_geworfen = True
# Falls das Argument nicht konvertiert werden konnte
if exception_geworfen:
raise JahrKeineGanzeZahl(f"Das Attribut \"{jahr}\" ist kein gueltiges Jahr, "
f"da es keine (ganze) Zahl ist oder nicht konvertiert werden kann.")
self.__jahr = jahr
pass | [
"def",
"jahr",
"(",
"self",
",",
"jahr",
":",
"int",
")",
":",
"exception_geworfen",
"=",
"False",
"try",
":",
"# Das Argument konvertieren",
"self",
".",
"__jahr",
":",
"int",
"=",
"int",
"(",
"jahr",
")",
"except",
"ValueError",
":",
"exception_geworfen",
"=",
"True",
"except",
"TypeError",
":",
"exception_geworfen",
"=",
"True",
"# Falls das Argument nicht konvertiert werden konnte",
"if",
"exception_geworfen",
":",
"raise",
"JahrKeineGanzeZahl",
"(",
"f\"Das Attribut \\\"{jahr}\\\" ist kein gueltiges Jahr, \"",
"f\"da es keine (ganze) Zahl ist oder nicht konvertiert werden kann.\"",
")",
"self",
".",
"__jahr",
"=",
"jahr",
"pass"
] | [
50,
4
] | [
65,
12
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
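The setter above validates the year by attempting an int() conversion and mapping failures to a domain exception; the same guard in a compact standalone form (class and function names are illustrative). Note that the original setter assigns the unconverted jahr after the check, so values that convert cleanly are stored as given:

class JahrKeineGanzeZahlSketch(ValueError):
    """Raised when the supplied year is not a whole number."""

def parse_jahr(jahr):
    try:
        return int(jahr)
    except (ValueError, TypeError) as exc:
        raise JahrKeineGanzeZahlSketch(
            f'Das Attribut "{jahr}" ist kein gueltiges Jahr, '
            f'da es keine (ganze) Zahl ist oder nicht konvertiert werden kann.') from exc

print(parse_jahr("2023"))  # 2023
# parse_jahr("20x3") or parse_jahr(None) raises JahrKeineGanzeZahlSketch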
|
Line.intersect_circle | (self, circle, rtol=1e-03, atol=1e-03,
include_end=False) | return circle.intersect_line(self, rtol, atol, include_end) | Von einem Line-Objekt und einem Circle-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
| Von einem Line-Objekt und einem Circle-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
| def intersect_circle(self, circle, rtol=1e-03, atol=1e-03,
include_end=False):
""" Von einem Line-Objekt und einem Circle-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
"""
return circle.intersect_line(self, rtol, atol, include_end) | [
"def",
"intersect_circle",
"(",
"self",
",",
"circle",
",",
"rtol",
"=",
"1e-03",
",",
"atol",
"=",
"1e-03",
",",
"include_end",
"=",
"False",
")",
":",
"return",
"circle",
".",
"intersect_line",
"(",
"self",
",",
"rtol",
",",
"atol",
",",
"include_end",
")"
] | [
1071,
4
] | [
1076,
67
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
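`Line.intersect_circle` above only forwards to `circle.intersect_line`, which is not part of this record. For orientation, a small stand-alone sketch of the usual line-circle intersection computation (a hypothetical helper, not the project's implementation):

```python
import math


def line_circle_intersections(p1, p2, center, radius):
    """Intersections of the infinite line through p1 and p2 with a circle.

    Substitutes the parametric line p1 + t*(p2 - p1) into
    |x - center|^2 = radius^2 and solves the resulting quadratic in t.
    """
    (x1, y1), (x2, y2) = p1, p2
    dx, dy = x2 - x1, y2 - y1
    if dx == 0 and dy == 0:
        raise ValueError("p1 and p2 must be distinct points")
    fx, fy = x1 - center[0], y1 - center[1]

    a = dx * dx + dy * dy
    b = 2.0 * (fx * dx + fy * dy)
    c = fx * fx + fy * fy - radius * radius
    disc = b * b - 4.0 * a * c
    if disc < 0:
        return []                                   # line misses the circle
    ts = {(-b - math.sqrt(disc)) / (2 * a), (-b + math.sqrt(disc)) / (2 * a)}
    return [(x1 + t * dx, y1 + t * dy) for t in sorted(ts)]


# the x-axis through the unit circle -> [(-1.0, 0.0), (1.0, 0.0)]
print(line_circle_intersections((-2, 0), (2, 0), (0, 0), 1.0))
```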
add_data | (values: dict, data: StepData) | Fügt Daten zu einem neuen Key hinzu.
Fügt die unter `"data"` angegebenen Daten zu einem neuen Key hinzu.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Fügt Daten zu einem neuen Key hinzu. | def add_data(values: dict, data: StepData):
"""Fügt Daten zu einem neuen Key hinzu.
Fügt die unter `"data"` angegebenen Daten zu einem neuen Key hinzu.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for new_key in values["new_keys"]:
value = data.deep_format(values["data"], values=values)
data.insert_data(new_key, value, values) | [
"def",
"add_data",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"new_key",
"in",
"values",
"[",
"\"new_keys\"",
"]",
":",
"value",
"=",
"data",
".",
"deep_format",
"(",
"values",
"[",
"\"data\"",
"]",
",",
"values",
"=",
"values",
")",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"value",
",",
"values",
")"
] | [
414,
0
] | [
424,
48
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
qa_mlc.isoTransmissions | ( self, positions ) | return transmissions | leaf und interleaf an den angegebenen Positionen ermitteln
Attributes
----------
positions : Positionen an denen ermittelt werden soll
Returns
-------
dict - mit positions
| leaf und interleaf an den angegebenen Positionen ermitteln | def isoTransmissions( self, positions ):
"""leaf und interleaf an den angegebenen Positionen ermitteln
Attributes
----------
positions : Positionen an denen ermittelt werden soll
Returns
-------
dict - mit positions
"""
transmissions = {}
for idx in positions:
transmissions[idx] = self.findTransmissions( idx )
return transmissions | [
"def",
"isoTransmissions",
"(",
"self",
",",
"positions",
")",
":",
"transmissions",
"=",
"{",
"}",
"for",
"idx",
"in",
"positions",
":",
"transmissions",
"[",
"idx",
"]",
"=",
"self",
".",
"findTransmissions",
"(",
"idx",
")",
"return",
"transmissions"
] | [
120,
4
] | [
136,
28
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
get_datasource_schedules | () | Gibt alle angelegten Datenquellen mitsamt ihren Zeitplänen zurück.
| Gibt alle angelegten Datenquellen mitsamt ihren Zeitplänen zurück. | def get_datasource_schedules():
""" Gibt alle angelegten Datenquellen mitsamt ihren Zeitplänen zurück.
"""
with db.open_con() as con:
res = con.execute(
"""
SELECT DISTINCT datasource_id, datasource_name, schedule_historisation.type as s_type, date, time, group_concat(DISTINCT weekday) AS weekdays,
time_interval
FROM datasource
INNER JOIN schedule_historisation USING(schedule_historisation_id)
LEFT JOIN schedule_historisation_weekday USING(schedule_historisation_id)
GROUP BY(datasource_id)
""").fetchall()
return res | [
"def",
"get_datasource_schedules",
"(",
")",
":",
"with",
"db",
".",
"open_con",
"(",
")",
"as",
"con",
":",
"res",
"=",
"con",
".",
"execute",
"(",
"\"\"\"\n SELECT DISTINCT datasource_id, datasource_name, schedule_historisation.type as s_type, date, time, group_concat(DISTINCT weekday) AS weekdays, \n time_interval\n FROM datasource\n INNER JOIN schedule_historisation USING(schedule_historisation_id)\n LEFT JOIN schedule_historisation_weekday USING(schedule_historisation_id)\n GROUP BY(datasource_id)\n \"\"\"",
")",
".",
"fetchall",
"(",
")",
"return",
"res"
] | [
86,
0
] | [
101,
18
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
kreisSegmente | (n,r,M) | return segmente | Kreis mit Radius r um Punkt M=[x,y], aufgeteilt in n Segmente, welche jeweils
aus Bezierkurven zusammengesetzt sind. Liefert Liste dieser n Segmente. | Kreis mit Radius r um Punkt M=[x,y], aufgeteilt in n Segmente, welche jeweils
aus Bezierkurven zusammengesetzt sind. Liefert Liste dieser n Segmente. | def kreisSegmente(n,r,M):
'''Kreis mit Radius r um Punkt M=[x,y], aufgeteilt in n Segmente, welche jeweils
aus Bezierkurven zusammengesetzt sind. Liefert Liste dieser n Segmente.'''
viertelKreise = bezierKreisNRM(1,r,M)
viertelKreisRest = 0
segmente = []
# segmente nacheinander aufbauen
for i in range(0,n):
segmente.append([])
# Teile Kreis in 4*n Teile
# --> je Segment werden 4 Teile benoetigt, Viertelkreis entspricht n Teile
# Kreisanteile, die noch fuer aktuelles Segment benoetigt werden
segmentRest = 4
while segmentRest > 0:
if viertelKreisRest == 0:
# hole naechsten Viertelkreis
restVomViertelKreis = viertelKreise.pop(0)
viertelKreisRest = n # Anteile, die restVomViertelKreis representiert
# nehme Teil vom Viertelkreis fuer aktuelles Segment
# --> Anzahl Teile = min(viertelKreisRest, segmentRest)
x = min(viertelKreisRest, segmentRest)
# --> Parameter bezieht sich auf Laenge von Viertelkreis-Restkurve
t = x*1./viertelKreisRest # \in[0,1]
# restVomVierteilkreis wird bei t\in[0,1] aufgeteilt
bezierFuerSegment, restVomViertelKreis = _bezierKurveAnPunktTeilen(restVomViertelKreis, t)
# x Teile fuer aktuelles Segment
segmente[i].append(bezierFuerSegment)
segmentRest = segmentRest-x
# Anzahl Teile im Rest vom Viertelkreis
viertelKreisRest = viertelKreisRest-x
# jetzt: Segment fertig (segmentRest=0) oder Viertelkreis zu Ende (viertelKreisRest=0)
assert segmentRest==0 or viertelKreisRest==0
return segmente | [
"def",
"kreisSegmente",
"(",
"n",
",",
"r",
",",
"M",
")",
":",
"viertelKreise",
"=",
"bezierKreisNRM",
"(",
"1",
",",
"r",
",",
"M",
")",
"viertelKreisRest",
"=",
"0",
"segmente",
"=",
"[",
"]",
"# segmente nacheinander aufbauen",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"n",
")",
":",
"segmente",
".",
"append",
"(",
"[",
"]",
")",
"# Teile Kreis in 4*n Teile",
"# --> je Segment werden 4 Teile benoetigt, Viertelkreis entspricht n Teile",
"# Kreisanteile, die noch fuer aktuelles Segment benoetigt werden",
"segmentRest",
"=",
"4",
"while",
"segmentRest",
">",
"0",
":",
"if",
"viertelKreisRest",
"==",
"0",
":",
"# hole naechsten Viertelkreis",
"restVomViertelKreis",
"=",
"viertelKreise",
".",
"pop",
"(",
"0",
")",
"viertelKreisRest",
"=",
"n",
"# Anteile, die restVomViertelKreis representiert",
"# nehme Teil vom Viertelkreis fuer aktuelles Segment",
"# --> Anzahl Teile = min(viertelKreisRest, segmentRest)",
"x",
"=",
"min",
"(",
"viertelKreisRest",
",",
"segmentRest",
")",
"# --> Parameter bezieht sich auf Laenge von Viertelkreis-Restkurve",
"t",
"=",
"x",
"*",
"1.",
"/",
"viertelKreisRest",
"# \\in[0,1]",
"# restVomVierteilkreis wird bei t\\in[0,1] aufgeteilt",
"bezierFuerSegment",
",",
"restVomViertelKreis",
"=",
"_bezierKurveAnPunktTeilen",
"(",
"restVomViertelKreis",
",",
"t",
")",
"# x Teile fuer aktuelles Segment",
"segmente",
"[",
"i",
"]",
".",
"append",
"(",
"bezierFuerSegment",
")",
"segmentRest",
"=",
"segmentRest",
"-",
"x",
"# Anzahl Teile im Rest vom Viertelkreis",
"viertelKreisRest",
"=",
"viertelKreisRest",
"-",
"x",
"# jetzt: Segment fertig (segmentRest=0) oder Viertelkreis zu Ende (viertelKreisRest=0)",
"assert",
"segmentRest",
"==",
"0",
"or",
"viertelKreisRest",
"==",
"0",
"return",
"segmente"
] | [
75,
0
] | [
112,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
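`kreisSegmente` above relies on `bezierKreisNRM` and `_bezierKurveAnPunktTeilen`, which are outside this record. As background, the quarter-circle Bezier approximation it builds on is usually written with the constant kappa = 4/3 * (sqrt(2) - 1); a small illustrative sketch, not the project's own helpers:

```python
import math

KAPPA = 4.0 / 3.0 * (math.sqrt(2.0) - 1.0)   # ~0.5522847, the usual circle constant


def bezier_quarter_circle(r, M=(0.0, 0.0)):
    """Cubic Bezier control points for the first quadrant of a circle around M.

    Runs counter-clockwise from (mx + r, my) to (mx, my + r).
    """
    mx, my = M
    return [
        (mx + r,         my),
        (mx + r,         my + KAPPA * r),
        (mx + KAPPA * r, my + r),
        (mx,             my + r),
    ]


def bezier_point(ctrl, t):
    """Evaluate a cubic Bezier curve at parameter t in [0, 1]."""
    (x0, y0), (x1, y1), (x2, y2), (x3, y3) = ctrl
    u = 1.0 - t
    x = u**3 * x0 + 3 * u**2 * t * x1 + 3 * u * t**2 * x2 + t**3 * x3
    y = u**3 * y0 + 3 * u**2 * t * y1 + 3 * u * t**2 * y2 + t**3 * y3
    return x, y


ctrl = bezier_quarter_circle(1.0)
x, y = bezier_point(ctrl, 0.5)
print(math.hypot(x, y))   # ~1.0: KAPPA is chosen so the curve midpoint lies on the circle
```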
MQTTclass.progress_start | ( self, topic:str=None, payload={} ) | Startet über mqtt einen Bearbeitungsfortschritt.
setzt in self.progress den übergebenen payload
mit mind. count und index
Parameters
----------
topic: str
der in ``{stat}/progress/{topic}/start`` eingefügte topic
payload: dict
zusätzliche payload Angaben - default: ``{"maxprogress" : 100, "progress" : 0}``
| Startet über mqtt einen Bearbeitungsfortschritt. | def progress_start( self, topic:str=None, payload={} ):
"""Startet über mqtt einen Bearbeitungsfortschritt.
setzt in self.progress den übergebenen payload
mit mind. count und index
Parameters
----------
topic: str
der in ``{stat}/progress/{topic}/start`` eingefügte topic
payload: dict
zusätzliche payload Angaben - default: ``{"maxprogress" : 100, "progress" : 0}``
"""
self._progress = {
"maxprogress" : 100,
"progress" : 0
}
self._progress.update( payload )
msg = {
"topic" : "{stat}/progress/{topic}/start".format( topic=topic, **self.defaults ),
"payload" : self._progress
}
self.publish( msg ) | [
"def",
"progress_start",
"(",
"self",
",",
"topic",
":",
"str",
"=",
"None",
",",
"payload",
"=",
"{",
"}",
")",
":",
"self",
".",
"_progress",
"=",
"{",
"\"maxprogress\"",
":",
"100",
",",
"\"progress\"",
":",
"0",
"}",
"self",
".",
"_progress",
".",
"update",
"(",
"payload",
")",
"msg",
"=",
"{",
"\"topic\"",
":",
"\"{stat}/progress/{topic}/start\"",
".",
"format",
"(",
"topic",
"=",
"topic",
",",
"*",
"*",
"self",
".",
"defaults",
")",
",",
"\"payload\"",
":",
"self",
".",
"_progress",
"}",
"self",
".",
"publish",
"(",
"msg",
")"
] | [
601,
4
] | [
626,
27
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ResetDefaultMessageData | () | return | Lösche alle Daten in der Tabelle 'game_heist_messages'
und schreibe die Default Daten in die Tabelle
| Lösche alle Daten in der Tabelle 'game_heist_messages'
und schreibe die Default Daten in die Tabelle
| def ResetDefaultMessageData():
''' Lösche alle Daten in der Tabelle 'game_heist_messages'
und schreibe die Default Daten in die Tabelle
'''
global myGameHeist
# Benachrichtigung, dass alle Daten gelöscht werden
winsound.MessageBeep()
returnValue = MessageBox(0, u"Du willst die ursprünglichen Benachrichtigungen wiederherstellen?"
"\r\nAlle bisherigen Einstellungen werden dabei gelöscht!"
, u"Bist du sicher?", 4)
if returnValue == MB_YES:
# Funktion zum Zurücksetzen aufrufen
myGameHeist.DB_create_DefaultMessages()
# Nachrichtenbox
MessageBox(0, u"Die Benachrichtigungen wurden auf die Grundeinstellungen zurückgesetzt!"
"\r\nLade das Skript neu und aktualisiere die Einstellungen."
, u"Reset wurde abgeschlossen!", 0)
# Information ins Log schreiben
myLogger.WriteLog("")
myLogger.WriteLog(
" ---------------------------------------------------------------")
myLogger.WriteLog(
" ---- BENACHRICHTIGSTEXTE WURDEN ZURÜCKGESETZT ----")
myLogger.WriteLog(
" ---------------------------------------------------------------")
myLogger.WriteLog("")
return | [
"def",
"ResetDefaultMessageData",
"(",
")",
":",
"global",
"myGameHeist",
"# Benachrichtigung, dass alle Daten gelöscht werden\r",
"winsound",
".",
"MessageBeep",
"(",
")",
"returnValue",
"=",
"MessageBox",
"(",
"0",
",",
"u\"Du willst die ursprünglichen Benachrichtigungen wiederherstellen?\"\r",
"\"\\r\\nAlle bisherigen Einstellungen werden dabei gelöscht!\"\r",
",",
"u\"Bist du sicher?\"",
",",
"4",
")",
"if",
"returnValue",
"==",
"MB_YES",
":",
"# Funktion zum Zurücksetzen aufrufen\r",
"myGameHeist",
".",
"DB_create_DefaultMessages",
"(",
")",
"# Nachrichtenbox\r",
"MessageBox",
"(",
"0",
",",
"u\"Die Benachrichtigungen wurden auf die Grundeinstellungen zurückgesetzt!\"\r",
"\"\\r\\nLade das Skript neu und aktualisiere die Einstellungen.\"",
",",
"u\"Reset wurde abgeschlossen!\"",
",",
"0",
")",
"# Information ins Log schreiben\r",
"myLogger",
".",
"WriteLog",
"(",
"\"\"",
")",
"myLogger",
".",
"WriteLog",
"(",
"\" ---------------------------------------------------------------\"",
")",
"myLogger",
".",
"WriteLog",
"(",
"\" ---- BENACHRICHTIGSTEXTE WURDEN ZURÜCKGESETZT ----\")",
"\r",
"myLogger",
".",
"WriteLog",
"(",
"\" ---------------------------------------------------------------\"",
")",
"myLogger",
".",
"WriteLog",
"(",
"\"\"",
")",
"return"
] | [
230,
0
] | [
262,
10
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
get_datasource_run_info | (datasource_id) | Gibt den Namen einer Datenquelle und den Namen der zugehörigen JSON-Datei zurück.
:param datasource_id: id des Jobs
:type datasource_id: int
| Gibt den Namen einer Datenquelle und den Namen der zugehörigen JSON-Datei zurück. | def get_datasource_run_info(datasource_id):
"""Gibt den Namen einer Datenquelle und den Namen der zugehörigen JSON-Datei zurück.
:param datasource_id: id des Jobs
:type datasource_id: int
"""
with db.open_con() as con:
res = con.execute("SELECT datasource_name FROM datasource WHERE datasource_id=?",
[datasource_id]).fetchall()
infoprovider_name = con.execute("SELECT infoprovider_name FROM infoprovider INNER JOIN datasource "
"USING (infoprovider_id) WHERE datasource_id=?",
[datasource_id]).fetchone()["infoprovider_name"]
datasource_name = infoprovider_name.replace(" ", "-") + "_" + res[0]["datasource_name"].replace(" ", "-")
return datasource_name, datasource_name, {} | [
"def",
"get_datasource_run_info",
"(",
"datasource_id",
")",
":",
"with",
"db",
".",
"open_con",
"(",
")",
"as",
"con",
":",
"res",
"=",
"con",
".",
"execute",
"(",
"\"SELECT datasource_name FROM datasource WHERE datasource_id=?\"",
",",
"[",
"datasource_id",
"]",
")",
".",
"fetchall",
"(",
")",
"infoprovider_name",
"=",
"con",
".",
"execute",
"(",
"\"SELECT infoprovider_name FROM infoprovider INNER JOIN datasource \"",
"\"USING (infoprovider_id) WHERE datasource_id=?\"",
",",
"[",
"datasource_id",
"]",
")",
".",
"fetchone",
"(",
")",
"[",
"\"infoprovider_name\"",
"]",
"datasource_name",
"=",
"infoprovider_name",
".",
"replace",
"(",
"\" \"",
",",
"\"-\"",
")",
"+",
"\"_\"",
"+",
"res",
"[",
"0",
"]",
"[",
"\"datasource_name\"",
"]",
".",
"replace",
"(",
"\" \"",
",",
"\"-\"",
")",
"return",
"datasource_name",
",",
"datasource_name",
",",
"{",
"}"
] | [
104,
0
] | [
119,
51
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
nachrichten_handler | (nachricht, bot, users) | Handling der vorliegenden Nachricht | Handling der vorliegenden Nachricht | def nachrichten_handler(nachricht, bot, users):
"""Handling der vorliegenden Nachricht"""
telegram_id = nachricht["message"]["from"]["id"]
if telegram_id not in users.keys():
bot.send_message(telegram_id, "Permission denied")
return
if "message" in nachricht:
# Prüfen ob es sich um ein Botkommando handelt
if "bot_command" in nachricht["message"].get("entities", [{}])[0].get("type", ""):
bot_command(nachricht, bot, users, telegram_id)
elif users[telegram_id].menue is not None:
users[telegram_id].menue(nachricht, bot, users, telegram_id) | [
"def",
"nachrichten_handler",
"(",
"nachricht",
",",
"bot",
",",
"users",
")",
":",
"telegram_id",
"=",
"nachricht",
"[",
"\"message\"",
"]",
"[",
"\"from\"",
"]",
"[",
"\"id\"",
"]",
"if",
"telegram_id",
"not",
"in",
"users",
".",
"keys",
"(",
")",
":",
"bot",
".",
"send_message",
"(",
"telegram_id",
",",
"\"Permission denied\"",
")",
"return",
"if",
"\"message\"",
"in",
"nachricht",
":",
"# Prüfen ob es sich um ein Botkommando handelt",
"if",
"\"bot_command\"",
"in",
"nachricht",
"[",
"\"message\"",
"]",
".",
"get",
"(",
"\"entities\"",
",",
"[",
"{",
"}",
"]",
")",
"[",
"0",
"]",
".",
"get",
"(",
"\"type\"",
",",
"\"\"",
")",
":",
"bot_command",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")",
"elif",
"users",
"[",
"telegram_id",
"]",
".",
"menue",
"is",
"not",
"None",
":",
"users",
"[",
"telegram_id",
"]",
".",
"menue",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")"
] | [
154,
0
] | [
165,
72
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
DateiZugriff.schreiben_alle | (self, daten) | return | Komplettes zurückschreiben aller Daten einer Austauschdatei | Komplettes zurückschreiben aller Daten einer Austauschdatei | def schreiben_alle(self, daten):
""" Komplettes zurückschreiben aller Daten einer Austauschdatei """
with open(self.dateiname, "w") as f:
for wert in daten:
zeile = str(wert)+"\n"
zeile = zeile.replace(".",",") # deutsches Komma in Austauschdatei
f.write(zeile)
return | [
"def",
"schreiben_alle",
"(",
"self",
",",
"daten",
")",
":",
"with",
"open",
"(",
"self",
".",
"dateiname",
",",
"\"w\"",
")",
"as",
"f",
":",
"for",
"wert",
"in",
"daten",
":",
"zeile",
"=",
"str",
"(",
"wert",
")",
"+",
"\"\\n\"",
"zeile",
"=",
"zeile",
".",
"replace",
"(",
"\".\"",
",",
"\",\"",
")",
"# deutsches Komma in Austauschdatei",
"f",
".",
"write",
"(",
"zeile",
")",
"return"
] | [
30,
4
] | [
37,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
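`DateiZugriff.schreiben_alle` above writes one value per line and swaps the decimal point for a German decimal comma. A round-trip sketch of the same convention (the file name is only an example):

```python
def werte_schreiben(pfad, werte):
    """Write one value per line, using a decimal comma as in the record above."""
    with open(pfad, "w", encoding="utf-8") as f:
        for wert in werte:
            f.write(str(wert).replace(".", ",") + "\n")


def werte_lesen(pfad):
    """Read the values back, turning the comma into a dot for float()."""
    with open(pfad, encoding="utf-8") as f:
        return [float(zeile.strip().replace(",", ".")) for zeile in f if zeile.strip()]


werte_schreiben("austausch.txt", [1.5, 2.25, 3.0])
print(werte_lesen("austausch.txt"))   # [1.5, 2.25, 3.0]
```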
qa_field.find4Qdata | ( self, field=None ) | return {
'filename': self.infos["filename"],
'Kennung': self.infos["Kennung"],
'type': self.infos['testTags'],
'unit': self.infos['unit'],
'energy': self.infos['energy'],
'gantry' : self.infos['gantry'],
'collimator': self.infos['collimator'],
'field' : field,
'result' : result
} | Die transmissions eines 4 Quadranten Feldes im angegebenem Bereich ermitteln
Reihenfolge in result 'Q2Q1','Q2Q3','Q3Q4','Q1Q4'
[start:stop:step, start:stop:step ]
roi = np.array([
[11, 12, 13, 14, 15],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35],
[41, 42, 43, 44, 45],
[51, 52, 53, 54, 55]])
print( roi[ : , 0:1 ] )
[[11] [21] [31] [41] [51]] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ 0 ] )
[11 12 13 14 15] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ 0:1, ] )
[[11 12 13 14 15]] - ( 5 Line2D nicht gezeichnet)
print( roi[ 0:1, ][0] )
[11 12 13 14 15] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ :, -1: ] )
[[15] [25] [35] [45] [55]] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ -1 ] )
[51 52 53 54 55] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ -1:, : ][0] )
[51 52 53 54 55] - ( 1 Line2D gezeichnet LU-RO)
# richtungsumkehr
print( roi[ ::-1, -1: ] )
[[55] [45] [35] [25] [15]] - ( 1 Line2D gezeichnet LO-RU)
| Die transmissions eines 4 Quadranten Feldes im angegebenem Bereich ermitteln | def find4Qdata( self, field=None ):
""" Die transmissions eines 4 Quadranten Feldes im angegebenem Bereich ermitteln
Reihenfolge in result 'Q2Q1','Q2Q3','Q3Q4','Q1Q4'
[start:stop:step, start:stop:step ]
roi = np.array([
[11, 12, 13, 14, 15],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35],
[41, 42, 43, 44, 45],
[51, 52, 53, 54, 55]])
print( roi[ : , 0:1 ] )
[[11] [21] [31] [41] [51]] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ 0 ] )
[11 12 13 14 15] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ 0:1, ] )
[[11 12 13 14 15]] - ( 5 Line2D nicht gezeichnet)
print( roi[ 0:1, ][0] )
[11 12 13 14 15] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ :, -1: ] )
[[15] [25] [35] [45] [55]] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ -1 ] )
[51 52 53 54 55] - ( 1 Line2D gezeichnet LU-RO)
print( roi[ -1:, : ][0] )
[51 52 53 54 55] - ( 1 Line2D gezeichnet LU-RO)
# richtungsumkehr
print( roi[ ::-1, -1: ] )
[[55] [45] [35] [25] [15]] - ( 1 Line2D gezeichnet LO-RU)
"""
if not field:
field = { "X1":-50, "X2": 50, "Y1": -50, "Y2": 50 }
roi = self.image.getRoi( field ).copy()
result = {}
result['Q2Q1'] = {
'name' : 'Q2 - Q1',
'profile' : MultiProfile( roi[:, 0:1] ),
'field' : field
}
result['Q2Q3'] = {
'name' : 'Q2 - Q3',
'profile' : MultiProfile( roi[ 0:1, ][0] ),
'field' : field
}
result['Q3Q4'] = {
'name' : 'Q3 - Q4',
'profile' : MultiProfile( roi[ :, -1: ] ),
'field' : field
}
result['Q1Q4'] = {
'name' : 'Q1 - Q4',
'profile' : MultiProfile( roi[ -1:, : ][0] ),
'field' : field
}
#print( result )
for k in result:
#print(k)
p_min = np.min( result[k]["profile"] )
p_max = np.max( result[k]["profile"] )
result[k]["min"] = p_min
result[k]["max"] = p_max
result[k]["value"] = (lambda x: p_min if x < 0.9 else p_max )(p_min)
return {
'filename': self.infos["filename"],
'Kennung': self.infos["Kennung"],
'type': self.infos['testTags'],
'unit': self.infos['unit'],
'energy': self.infos['energy'],
'gantry' : self.infos['gantry'],
'collimator': self.infos['collimator'],
'field' : field,
'result' : result
} | [
"def",
"find4Qdata",
"(",
"self",
",",
"field",
"=",
"None",
")",
":",
"if",
"not",
"field",
":",
"field",
"=",
"{",
"\"X1\"",
":",
"-",
"50",
",",
"\"X2\"",
":",
"50",
",",
"\"Y1\"",
":",
"-",
"50",
",",
"\"Y2\"",
":",
"50",
"}",
"roi",
"=",
"self",
".",
"image",
".",
"getRoi",
"(",
"field",
")",
".",
"copy",
"(",
")",
"result",
"=",
"{",
"}",
"result",
"[",
"'Q2Q1'",
"]",
"=",
"{",
"'name'",
":",
"'Q2 - Q1'",
",",
"'profile'",
":",
"MultiProfile",
"(",
"roi",
"[",
":",
",",
"0",
":",
"1",
"]",
")",
",",
"'field'",
":",
"field",
"}",
"result",
"[",
"'Q2Q3'",
"]",
"=",
"{",
"'name'",
":",
"'Q2 - Q3'",
",",
"'profile'",
":",
"MultiProfile",
"(",
"roi",
"[",
"0",
":",
"1",
",",
"]",
"[",
"0",
"]",
")",
",",
"'field'",
":",
"field",
"}",
"result",
"[",
"'Q3Q4'",
"]",
"=",
"{",
"'name'",
":",
"'Q3 - Q4'",
",",
"'profile'",
":",
"MultiProfile",
"(",
"roi",
"[",
":",
",",
"-",
"1",
":",
"]",
")",
",",
"'field'",
":",
"field",
"}",
"result",
"[",
"'Q1Q4'",
"]",
"=",
"{",
"'name'",
":",
"'Q1 - Q4'",
",",
"'profile'",
":",
"MultiProfile",
"(",
"roi",
"[",
"-",
"1",
":",
",",
":",
"]",
"[",
"0",
"]",
")",
",",
"'field'",
":",
"field",
"}",
"#print( result )",
"for",
"k",
"in",
"result",
":",
"#print(k)",
"p_min",
"=",
"np",
".",
"min",
"(",
"result",
"[",
"k",
"]",
"[",
"\"profile\"",
"]",
")",
"p_max",
"=",
"np",
".",
"max",
"(",
"result",
"[",
"k",
"]",
"[",
"\"profile\"",
"]",
")",
"result",
"[",
"k",
"]",
"[",
"\"min\"",
"]",
"=",
"p_min",
"result",
"[",
"k",
"]",
"[",
"\"max\"",
"]",
"=",
"p_max",
"result",
"[",
"k",
"]",
"[",
"\"value\"",
"]",
"=",
"(",
"lambda",
"x",
":",
"p_min",
"if",
"x",
"<",
"0.9",
"else",
"p_max",
")",
"(",
"p_min",
")",
"return",
"{",
"'filename'",
":",
"self",
".",
"infos",
"[",
"\"filename\"",
"]",
",",
"'Kennung'",
":",
"self",
".",
"infos",
"[",
"\"Kennung\"",
"]",
",",
"'type'",
":",
"self",
".",
"infos",
"[",
"'testTags'",
"]",
",",
"'unit'",
":",
"self",
".",
"infos",
"[",
"'unit'",
"]",
",",
"'energy'",
":",
"self",
".",
"infos",
"[",
"'energy'",
"]",
",",
"'gantry'",
":",
"self",
".",
"infos",
"[",
"'gantry'",
"]",
",",
"'collimator'",
":",
"self",
".",
"infos",
"[",
"'collimator'",
"]",
",",
"'field'",
":",
"field",
",",
"'result'",
":",
"result",
"}"
] | [
261,
4
] | [
345,
9
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
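The slicing behaviour spelled out in the `find4Qdata` docstring above can be checked directly with NumPy, using the same 5x5 example matrix:

```python
import numpy as np

roi = np.array([
    [11, 12, 13, 14, 15],
    [21, 22, 23, 24, 25],
    [31, 32, 33, 34, 35],
    [41, 42, 43, 44, 45],
    [51, 52, 53, 54, 55],
])

print(roi[:, 0:1].ravel())     # first column:           [11 21 31 41 51]
print(roi[0:1, ][0])           # first row:              [11 12 13 14 15]
print(roi[:, -1:].ravel())     # last column:            [15 25 35 45 55]
print(roi[-1:, :][0])          # last row:               [51 52 53 54 55]
print(roi[::-1, -1:].ravel())  # last column, reversed:  [55 45 35 25 15]
```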
test_himmelblau_bound_fail | (outbound) | Test himmelblau bound exception | Test himmelblau bound exception | def test_himmelblau_bound_fail(outbound):
"""Test himmelblau bound exception"""
with pytest.raises(ValueError):
x = outbound(b["himmelblau"].low, b["himmelblau"].high, size=(3, 2))
fx.himmelblau(x) | [
"def",
"test_himmelblau_bound_fail",
"(",
"outbound",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"ValueError",
")",
":",
"x",
"=",
"outbound",
"(",
"b",
"[",
"\"himmelblau\"",
"]",
".",
"low",
",",
"b",
"[",
"\"himmelblau\"",
"]",
".",
"high",
",",
"size",
"=",
"(",
"3",
",",
"2",
")",
")",
"fx",
".",
"himmelblau",
"(",
"x",
")"
] | [
100,
0
] | [
104,
24
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
prozent_conv | (s) | return float_conv(s[:-1]) | Umwandlung eines Prozentwerts
:param s: Prozentwert mit Dezimalkomma und "%"
:type s: string
:raises ValueError: bei Fehler
:return: Prozentwert (0.0-100.0)
:rtype: float
| Umwandlung eines Prozentwerts | def prozent_conv(s):
""" Umwandlung eines Prozentwerts
:param s: Prozentwert mit Dezimalkomma und "%"
:type s: string
:raises ValueError: bei Fehler
:return: Prozentwert (0.0-100.0)
:rtype: float
"""
if s is None:
return None
s = s.strip()
if not s.endswith("%"):
raise ValueError("Wert endet nicht mit %")
return float_conv(s[:-1]) | [
"def",
"prozent_conv",
"(",
"s",
")",
":",
"if",
"s",
"is",
"None",
":",
"return",
"None",
"s",
"=",
"s",
".",
"strip",
"(",
")",
"if",
"not",
"s",
".",
"endswith",
"(",
"\"%\"",
")",
":",
"raise",
"ValueError",
"(",
"\"Wert endet nicht mit %\"",
")",
"return",
"float_conv",
"(",
"s",
"[",
":",
"-",
"1",
"]",
")"
] | [
57,
0
] | [
74,
29
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
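`prozent_conv` above relies on a `float_conv` helper that is not part of this record; presumably it converts a decimal-comma string to `float`. Under that assumption, a self-contained usage sketch:

```python
def float_conv(s):
    # assumption: the project's float_conv maps a decimal comma to a dot
    return None if s is None else float(s.strip().replace(",", "."))


def prozent_conv(s):
    if s is None:
        return None
    s = s.strip()
    if not s.endswith("%"):
        raise ValueError("Wert endet nicht mit %")
    return float_conv(s[:-1])


print(prozent_conv("12,5%"))     # 12.5
print(prozent_conv(" 100 % "))   # 100.0
```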
handle_get | (_daten) | gib die gesamte gespeicherte Konfiguration zurueck | gib die gesamte gespeicherte Konfiguration zurueck | def handle_get(_daten):
"""gib die gesamte gespeicherte Konfiguration zurueck"""
emit("get_all", konfiguration) | [
"def",
"handle_get",
"(",
"_daten",
")",
":",
"emit",
"(",
"\"get_all\"",
",",
"konfiguration",
")"
] | [
109,
0
] | [
111,
34
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispSAFRS._int_groupby_query | (cls, query, params:dict={} ) | return _result | Führt eine group query aus und gibt das Abfrage Ergebnis zurück.
Wird delimiter angegeben, wird nur das erste group Feld mit delimiter gesplittet zurückgegeben
Parameters
----------
query : obj
Das bisherige query Object
params : dict, optional
The default is::
{
"groups": {},
"fields": { "<tablename>":[ <fieldname1...fieldnameX>] },
"labels": {"<tablename>.<fieldname1>":"<label1>", ... },
"filter": "",
"delimiter": ""
}
Returns
-------
result : dict
data
| Führt eine group query aus und gibt das Abfrage Ergebnis zurück. | def _int_groupby_query(cls, query, params:dict={} ):
"""Führt eine group query aus und gibt das Abfrage Ergebnis zurück.
Wird delimiter angegeben, wird nur das erste group Feld mit delimiter gesplittet zurückgegeben
Parameters
----------
query : obj
Das bisherige query Object
params : dict, optional
The default is::
{
"groups": {},
"fields": { "<tablename>":[ <fieldname1...fieldnameX>] },
"labels": {"<tablename>.<fieldname1>":"<label1>", ... },
"filter": "",
"delimiter": ""
}
Returns
-------
result : dict
data
"""
_params = {
"groups": {},
"fields": {},
"labels": {},
"filter": "",
"delimiter": ""
}
_params.update( params )
query, group_entities, ok = cls._int_groupby( query, _params )
if ok == False: # pragma: no cover
_result = {
'errors' : [ "Fehler in _int_group" ]
}
return _result
# zusätzlich noch die Anzahl mitgeben
query = query.add_columns( func.count( cls.id ).label('hasChildren') )
# filter berücksichtigen
if not _params["filter"] == "":
query = cls._int_filter( query, _params["filter"] )
# die query durchführen
_result = cls._int_query( query )
# wenn angegeben nach delimter splitten
if _params["delimiter"] and not _params["delimiter"] == "":
words = {}
if len(group_entities) > 0:
# das erste feld bestimmen
s_field = str( group_entities[0] )
field = s_field.split(".")
for item in _result["data"]:
if field[0] == item["type"] and field[1] in item["attributes"]:
# den feldinhalt spliten und anfügen
val = item["attributes"][ field[1] ]
if type(val) is str:
p = val.split( _params[ "delimiter" ])
for s in p:
words[ s.strip() ] = s.strip()
data = []
for d in sorted( words.keys() ):
if not d == None and not d=="" and not d=="None":
data.append( { "attributes":{ field[1]:d } } )
_result["data"] = data
_result["count"] = len( data )
return _result | [
"def",
"_int_groupby_query",
"(",
"cls",
",",
"query",
",",
"params",
":",
"dict",
"=",
"{",
"}",
")",
":",
"_params",
"=",
"{",
"\"groups\"",
":",
"{",
"}",
",",
"\"fields\"",
":",
"{",
"}",
",",
"\"labels\"",
":",
"{",
"}",
",",
"\"filter\"",
":",
"\"\"",
",",
"\"delimiter\"",
":",
"\"\"",
"}",
"_params",
".",
"update",
"(",
"params",
")",
"query",
",",
"group_entities",
",",
"ok",
"=",
"cls",
".",
"_int_groupby",
"(",
"query",
",",
"_params",
")",
"if",
"ok",
"==",
"False",
":",
"# pragma: no cover",
"_result",
"=",
"{",
"'errors'",
":",
"[",
"\"Fehler in _int_group\"",
"]",
"}",
"return",
"_result",
"# zusätzlich noch die Anzahl mitgeben",
"query",
"=",
"query",
".",
"add_columns",
"(",
"func",
".",
"count",
"(",
"cls",
".",
"id",
")",
".",
"label",
"(",
"'hasChildren'",
")",
")",
"# filter berücksichtigen",
"if",
"not",
"_params",
"[",
"\"filter\"",
"]",
"==",
"\"\"",
":",
"query",
"=",
"cls",
".",
"_int_filter",
"(",
"query",
",",
"_params",
"[",
"\"filter\"",
"]",
")",
"# die query durchführen",
"_result",
"=",
"cls",
".",
"_int_query",
"(",
"query",
")",
"# wenn angegeben nach delimter splitten",
"if",
"_params",
"[",
"\"delimiter\"",
"]",
"and",
"not",
"_params",
"[",
"\"delimiter\"",
"]",
"==",
"\"\"",
":",
"words",
"=",
"{",
"}",
"if",
"len",
"(",
"group_entities",
")",
">",
"0",
":",
"# das erste feld bestimmen",
"s_field",
"=",
"str",
"(",
"group_entities",
"[",
"0",
"]",
")",
"field",
"=",
"s_field",
".",
"split",
"(",
"\".\"",
")",
"for",
"item",
"in",
"_result",
"[",
"\"data\"",
"]",
":",
"if",
"field",
"[",
"0",
"]",
"==",
"item",
"[",
"\"type\"",
"]",
"and",
"field",
"[",
"1",
"]",
"in",
"item",
"[",
"\"attributes\"",
"]",
":",
"# den feldinhalt spliten und anfügen",
"val",
"=",
"item",
"[",
"\"attributes\"",
"]",
"[",
"field",
"[",
"1",
"]",
"]",
"if",
"type",
"(",
"val",
")",
"is",
"str",
":",
"p",
"=",
"val",
".",
"split",
"(",
"_params",
"[",
"\"delimiter\"",
"]",
")",
"for",
"s",
"in",
"p",
":",
"words",
"[",
"s",
".",
"strip",
"(",
")",
"]",
"=",
"s",
".",
"strip",
"(",
")",
"data",
"=",
"[",
"]",
"for",
"d",
"in",
"sorted",
"(",
"words",
".",
"keys",
"(",
")",
")",
":",
"if",
"not",
"d",
"==",
"None",
"and",
"not",
"d",
"==",
"\"\"",
"and",
"not",
"d",
"==",
"\"None\"",
":",
"data",
".",
"append",
"(",
"{",
"\"attributes\"",
":",
"{",
"field",
"[",
"1",
"]",
":",
"d",
"}",
"}",
")",
"_result",
"[",
"\"data\"",
"]",
"=",
"data",
"_result",
"[",
"\"count\"",
"]",
"=",
"len",
"(",
"data",
")",
"return",
"_result"
] | [
1197,
4
] | [
1272,
22
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
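The delimiter branch of `_int_groupby_query` above splits the first grouped field and returns the unique, sorted parts. The same idea in isolation, on plain dictionaries instead of SQLAlchemy results (field names are illustrative):

```python
def split_group_values(rows, field, delimiter=","):
    """Collect the unique, sorted parts of a delimited text field."""
    words = set()
    for row in rows:
        value = row.get(field)
        if isinstance(value, str):
            words.update(part.strip() for part in value.split(delimiter))
    # drop empty strings and the literal "None", as the record above does
    return [w for w in sorted(words) if w and w != "None"]


rows = [{"tags": "qa, mlc"}, {"tags": "mlc, field"}, {"tags": None}]
print(split_group_values(rows, "tags"))   # ['field', 'mlc', 'qa']
```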
register_type_func | (types: dict, error: Type[StepError], func) | return type_func | Registriert die übergebene Funktion
und versieht sie mit einem try-except-Block.
:param types: Dictionary, in dem der Typ registriert werden soll.
:param error: Fehler, der geworfen werden soll.
:param func: Zu registrierende Funktion.
:return: Funktion mit try-catch-Block.
| Registriert die übergebene Funktion
und versieht sie mit einem try-except-Block. | def register_type_func(types: dict, error: Type[StepError], func):
""" Registriert die übergebene Funktion
und versieht sie mit einem try-except-Block.
:param types: Dictionary, in dem der Typ registriert werden soll.
:param error: Fehler, der geworfen werden soll.
:param func: Zu registrierende Funktion.
:return: Funktion mit try-catch-Block.
"""
func = raise_step_error(error)(func)
@functools.wraps(func)
def type_func(values: dict, data: StepData, *args, **kwargs):
# replace presets
if "preset" in values:
# TODO (Max) may give values a higher prio
merge_dict(values, data.get_preset(values["preset"]))
return func(values, data, *args, **kwargs)
types[func.__name__] = type_func
return type_func | [
"def",
"register_type_func",
"(",
"types",
":",
"dict",
",",
"error",
":",
"Type",
"[",
"StepError",
"]",
",",
"func",
")",
":",
"func",
"=",
"raise_step_error",
"(",
"error",
")",
"(",
"func",
")",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"type_func",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# replace presets",
"if",
"\"preset\"",
"in",
"values",
":",
"# TODO (Max) may give values a higher prio",
"merge_dict",
"(",
"values",
",",
"data",
".",
"get_preset",
"(",
"values",
"[",
"\"preset\"",
"]",
")",
")",
"return",
"func",
"(",
"values",
",",
"data",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"types",
"[",
"func",
".",
"__name__",
"]",
"=",
"type_func",
"return",
"type_func"
] | [
8,
0
] | [
29,
20
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
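`register_type_func` above combines three things: an error-wrapping decorator, preset merging, and registration under the function's own name. A reduced sketch of the registry/decorator part (preset handling omitted; the registry and exception class are hypothetical stand-ins):

```python
import functools

TYPES = {}   # hypothetical registry, analogous to the `types` dict in the record


class StepTypeError(Exception):
    """Hypothetical stand-in for the StepError subclasses used in the record."""


def register_type(func):
    """Wrap `func`, normalise its errors, and register it under its own name."""
    @functools.wraps(func)
    def wrapper(values, data, *args, **kwargs):
        try:
            return func(values, data, *args, **kwargs)
        except Exception as exc:          # the real code raises a specific StepError
            raise StepTypeError(str(exc)) from exc
    TYPES[func.__name__] = wrapper
    return wrapper


@register_type
def uppercase(values, data):
    return str(values["text"]).upper()


print(TYPES["uppercase"]({"text": "hallo"}, None))   # HALLO
```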
ispConfig.__init__ | ( self, lastOverlay:int=None, development:bool=True,
rootlevel:int=logging.ERROR,
mqttlevel:int=logging.NOTSET,
cleanup:bool=False,
config:dict=None
) | Konfiguration initialisieren und laden.
Zuerst wird die Konfiguration config.json eingelesen
und anschließend sortiert von allen passenden config-*.json Dateien überlagert
Parameters
----------
lastOverlay : int
Gibt an bis zu welcher config Datei eingelesen wird.Default = 99999999 (config-99999999.json).
development : bool
Entwicklungszweig verwenden oder nicht. Default is True.
Wird die App in einem Unterverzeichnis mit dem Namen production/ oder development/ abgelegt,
so wird development autom. je nach Name gesetzt.
rootlevel: int - logging.ERROR
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: ERROR
mqttlevel: int - logging.NOTSET
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: NOTSET
cleanup: bool
MQTT Cleanup vor dem initialisieren durchführen. Default = False
config: dict
mit dieser Angabe wird keine Konfiguration geladen, sondern die angegebenen Daten verwendet
| Konfiguration initialisieren und laden. | def __init__( self, lastOverlay:int=None, development:bool=True,
rootlevel:int=logging.ERROR,
mqttlevel:int=logging.NOTSET,
cleanup:bool=False,
config:dict=None
):
"""Konfiguration initialisieren und laden.
Zuerst wird die Konfiguration config.json eingelesen
und anschließend sortiert von allen passenden config-*.json Dateien überlagert
Parameters
----------
lastOverlay : int
Gibt an bis zu welcher config Datei eingelesen wird.Default = 99999999 (config-99999999.json).
development : bool
Entwicklungszweig verwenden oder nicht. Default is True.
Wird die App in einem Unterverzeichnis mit dem Namen production/ oder development/ abgelegt,
so wird development autom. je nach Name gesetzt.
rootlevel: int - logging.ERROR
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: ERROR
mqttlevel: int - logging.NOTSET
NOTSET=0, DEBUG=10, INFO=20, WARN=30, ERROR=40, and CRITICAL=50. Default: NOTSET
cleanup: bool
MQTT Cleanup vor dem initialisieren durchführen. Default = False
config: dict
mit dieser Angabe wird keine Konfiguration geladen, sondern die angegebenen Daten verwendet
"""
# _basedir festlegen mit __file__ damit ein testaufruf von hier funktioniert
self._basedir = osp.abspath( osp.join( osp.dirname( osp.abspath( __file__ ) ) , "../" ) )
# name des startenden programms
self._name = osp.basename( sys.argv[0] )
# test auf Entwicklungsumgebung
self._development = development
if self._basedir.find( '/production/' ) > -1: # pragma: no cover
self._development = False
elif self._basedir.find( '/development/' ) > -1:
self._development = True
# lastOverlay auf das aktuelle Datum
if lastOverlay == None:
# ohne lastOverlay zuerst den Zahlenwert für das aktuelle Datum
lastOverlay = datetime.now().strftime("%Y%m%d")
# listet die Dateien auf bei denen es zu einem Fehler beim einlesen kam
self._loadErrors = []
# default werte setzen
self._config = DotMap( default_config )
self._configs = ["default"]
if config:
# config in self._config merken
self.update( config )
self._configs.append( "init" )
else:
# Konfiguration einlesen und in self._config merken
self._configLoad( int(lastOverlay) )
self._lastOverlay = lastOverlay
# die Konfiguration um BASE_DIR erweitern
self._config[ "BASE_DIR" ] = self._basedir
# default logger
self.rootInitLogger( rootlevel )
# logger für mqtt zugriff über self._mqtthdlr
self._mqtthdlr = None
# mqtt Logger bereitstellen oder initialisieren
self.mqttInitLogger( mqttlevel, cleanup )
# variables vorbelegen
self.setVariables()
# Jinja Environment bereitstellen
self._env = self.jinjaEnv() | [
"def",
"__init__",
"(",
"self",
",",
"lastOverlay",
":",
"int",
"=",
"None",
",",
"development",
":",
"bool",
"=",
"True",
",",
"rootlevel",
":",
"int",
"=",
"logging",
".",
"ERROR",
",",
"mqttlevel",
":",
"int",
"=",
"logging",
".",
"NOTSET",
",",
"cleanup",
":",
"bool",
"=",
"False",
",",
"config",
":",
"dict",
"=",
"None",
")",
":",
"# _basedir festlegen mit __file__ damit ein testaufruf von hier funktioniert",
"self",
".",
"_basedir",
"=",
"osp",
".",
"abspath",
"(",
"osp",
".",
"join",
"(",
"osp",
".",
"dirname",
"(",
"osp",
".",
"abspath",
"(",
"__file__",
")",
")",
",",
"\"../\"",
")",
")",
"# name des startenden programms",
"self",
".",
"_name",
"=",
"osp",
".",
"basename",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"# test auf Entwicklungsumgebung",
"self",
".",
"_development",
"=",
"development",
"if",
"self",
".",
"_basedir",
".",
"find",
"(",
"'/production/'",
")",
">",
"-",
"1",
":",
"# pragma: no cover",
"self",
".",
"_development",
"=",
"False",
"elif",
"self",
".",
"_basedir",
".",
"find",
"(",
"'/development/'",
")",
">",
"-",
"1",
":",
"self",
".",
"_development",
"=",
"True",
"# lastOverlay auf das aktuelle Datum",
"if",
"lastOverlay",
"==",
"None",
":",
"# ohne lastOverlay zuerst den Zahlenwert für das aktuelle Datum",
"lastOverlay",
"=",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"\"%Y%m%d\"",
")",
"# listet die Dateien auf bei denen es zu einem Fehler beim einlesen kam",
"self",
".",
"_loadErrors",
"=",
"[",
"]",
"# default werte setzen",
"self",
".",
"_config",
"=",
"DotMap",
"(",
"default_config",
")",
"self",
".",
"_configs",
"=",
"[",
"\"default\"",
"]",
"if",
"config",
":",
"# config in self._config merken",
"self",
".",
"update",
"(",
"config",
")",
"self",
".",
"_configs",
".",
"append",
"(",
"\"init\"",
")",
"else",
":",
"# Konfiguration einlesen und in self._config merken",
"self",
".",
"_configLoad",
"(",
"int",
"(",
"lastOverlay",
")",
")",
"self",
".",
"_lastOverlay",
"=",
"lastOverlay",
"# die Konfiguration um BASE_DIR erweitern",
"self",
".",
"_config",
"[",
"\"BASE_DIR\"",
"]",
"=",
"self",
".",
"_basedir",
"# default logger",
"self",
".",
"rootInitLogger",
"(",
"rootlevel",
")",
"# logger für mqtt zugriff über self._mqtthdlr",
"self",
".",
"_mqtthdlr",
"=",
"None",
"# mqtt Logger bereitstellen oder initialisieren",
"self",
".",
"mqttInitLogger",
"(",
"mqttlevel",
",",
"cleanup",
")",
"# variables vorbelegen",
"self",
".",
"setVariables",
"(",
")",
"# Jinja Environment bereitstellen",
"self",
".",
"_env",
"=",
"self",
".",
"jinjaEnv",
"(",
")"
] | [
143,
4
] | [
227,
35
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
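`ispConfig.__init__` above layers `config-*.json` overlays (up to `lastOverlay`) on top of `config.json`. A much-reduced sketch of that overlay idea with plain dicts and a shallow `update`; the real class uses DotMap and more elaborate merging:

```python
import json
from pathlib import Path


def load_config(config_dir, last_overlay=99999999):
    """Read config.json, then apply config-YYYYMMDD.json overlays in order."""
    base = Path(config_dir)
    config = json.loads((base / "config.json").read_text(encoding="utf-8"))
    for path in sorted(base.glob("config-*.json")):
        stamp = path.stem.split("-", 1)[1]
        if stamp.isdigit() and int(stamp) <= int(last_overlay):
            # shallow merge; the record's class merges nested keys as well
            config.update(json.loads(path.read_text(encoding="utf-8")))
    return config


# config = load_config("./config")   # assuming config.json lives in ./config
```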
|
Playlists.copy_playlists | (self) | Versuche die Beispiel-Radio-Playlisten ins
mpd Playlisten Verzeichnis zu kopieren | Versuche die Beispiel-Radio-Playlisten ins
mpd Playlisten Verzeichnis zu kopieren | def copy_playlists(self):
""" Versuche die Beispiel-Radio-Playlisten ins
mpd Playlisten Verzeichnis zu kopieren """
print("copying some playlists to " + self.pl_dir)
try:
if os.path.isdir(self.pl_dir) and os.path.isdir(self.pl_src):
for pl_file in os.listdir(self.pl_src):
if pl_file.endswith(".m3u"):
shutil.copy2(os.path.join(self.pl_src, pl_file), self.pl_dir)
else:
print("missing " + self.pl_dir + " or " + self.pl_src)
if os.path.isdir(self.pl_dir):
MPC.update(os.path.basename(self.pl_dir))
except:
exctype, value = sys.exc_info()[:2]
print(str(exctype) + ': ' + str(value))
print("failed to copy playlists!") | [
"def",
"copy_playlists",
"(",
"self",
")",
":",
"print",
"(",
"\"copying some playlists to \"",
"+",
"self",
".",
"pl_dir",
")",
"try",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"pl_dir",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"pl_src",
")",
":",
"for",
"pl_file",
"in",
"os",
".",
"listdir",
"(",
"self",
".",
"pl_src",
")",
":",
"if",
"pl_file",
".",
"endswith",
"(",
"\".m3u\"",
")",
":",
"shutil",
".",
"copy2",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"pl_src",
",",
"pl_file",
")",
",",
"self",
".",
"pl_dir",
")",
"else",
":",
"print",
"(",
"\"missing \"",
"+",
"self",
".",
"pl_dir",
"+",
"\" or \"",
"+",
"self",
".",
"pl_src",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"pl_dir",
")",
":",
"MPC",
".",
"update",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"pl_dir",
")",
")",
"except",
":",
"exctype",
",",
"value",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
":",
"2",
"]",
"print",
"(",
"str",
"(",
"exctype",
")",
"+",
"': '",
"+",
"str",
"(",
"value",
")",
")",
"print",
"(",
"\"failed to copy playlists!\"",
")"
] | [
650,
4
] | [
666,
46
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
sub_lists | (values: dict, data: StepData) | Extrahiert aus einem Array (Liste) kleinere Arrays (Listen).
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
| Extrahiert aus einem Array (Liste) kleinere Arrays (Listen). | def sub_lists(values: dict, data: StepData):
"""Extrahiert aus einem Array (Liste) kleinere Arrays (Listen).
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
"""
value = data.get_data(values["array_key"], values)
for sub_list in values["sub_lists"]:
start = data.get_data(sub_list.get("range_start", 0), values, numbers.Number)
end = data.get_data(sub_list.get("range_end", -1), values, numbers.Number)
new_key = get_new_key(sub_list)
new_value = value[start:end]
data.insert_data(new_key, new_value, values) | [
"def",
"sub_lists",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"values",
"[",
"\"array_key\"",
"]",
",",
"values",
")",
"for",
"sub_list",
"in",
"values",
"[",
"\"sub_lists\"",
"]",
":",
"start",
"=",
"data",
".",
"get_data",
"(",
"sub_list",
".",
"get",
"(",
"\"range_start\"",
",",
"0",
")",
",",
"values",
",",
"numbers",
".",
"Number",
")",
"end",
"=",
"data",
".",
"get_data",
"(",
"sub_list",
".",
"get",
"(",
"\"range_end\"",
",",
"-",
"1",
")",
",",
"values",
",",
"numbers",
".",
"Number",
")",
"new_key",
"=",
"get_new_key",
"(",
"sub_list",
")",
"new_value",
"=",
"value",
"[",
"start",
":",
"end",
"]",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
565,
0
] | [
581,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
HeistSystem.WriteMessage_GameResult | (self, percentage) | return | Schreibt die Endnachricht in den Chat
| Schreibt die Endnachricht in den Chat
| def WriteMessage_GameResult(self, percentage):
''' Schreibt die Endnachricht in den Chat
'''
thisActionName = "WriteMessage_GameResult"
# Nachricht nach der übergebenen Prozentzahlen auslesen
if (percentage == 0):
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_Outcome_00Percent
)
elif (percentage >= 1) and (percentage <= 24):
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_Outcome_01_24Percent
)
elif (percentage >= 25) and (percentage <= 74):
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_Outcome_25_74Percent
)
elif (percentage >= 75) and (percentage <= 99):
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_Outcome_75_99Percent
)
else:
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_Outcome_100Percent
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
target=self.GameTargetName)
)
return | [
"def",
"WriteMessage_GameResult",
"(",
"self",
",",
"percentage",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_GameResult\"",
"# Nachricht nach der übergebenen Prozentzahlen auslesen\r",
"if",
"(",
"percentage",
"==",
"0",
")",
":",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_Outcome_00Percent",
")",
"elif",
"(",
"percentage",
">=",
"1",
")",
"and",
"(",
"percentage",
"<=",
"24",
")",
":",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_Outcome_01_24Percent",
")",
"elif",
"(",
"percentage",
">=",
"25",
")",
"and",
"(",
"percentage",
"<=",
"74",
")",
":",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_Outcome_25_74Percent",
")",
"elif",
"(",
"percentage",
">=",
"75",
")",
"and",
"(",
"percentage",
"<=",
"99",
")",
":",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_Outcome_75_99Percent",
")",
"else",
":",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_Outcome_100Percent",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"target",
"=",
"self",
".",
"GameTargetName",
")",
")",
"return"
] | [
1329,
4
] | [
1376,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Monat.anzahl_tage_im_schaltjahr | (self) | return self.__anzahl_tage_schaltjahr | Die Anzahl Tage im Monat in einem Schaltjahr | Die Anzahl Tage im Monat in einem Schaltjahr | def anzahl_tage_im_schaltjahr(self) -> AnzahlTageImMonat:
"""Die Anzahl Tage im Monat in einem Schaltjahr"""
return self.__anzahl_tage_schaltjahr | [
"def",
"anzahl_tage_im_schaltjahr",
"(",
"self",
")",
"->",
"AnzahlTageImMonat",
":",
"return",
"self",
".",
"__anzahl_tage_schaltjahr"
] | [
75,
4
] | [
77,
44
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
RawData.write_CheerDataLog | ( self, cheerAmount = 0, userDisplayName = "" ) | return | Schreibt die Cheer-Daten in ein Logfile | Schreibt die Cheer-Daten in ein Logfile | def write_CheerDataLog( self, cheerAmount = 0, userDisplayName = "" ):
''' Schreibt die Cheer-Daten in ein Logfile '''
thisActionName = "write_CheerDataLog"
tmpText = "User = {0} ( Bits: {1} )".format( userDisplayName, str(cheerAmount) )
text = str( '[' + myTime.TimeStampLog() + '] : ' + str( tmpText ) )
# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde
if self.LogFilesPath:
AppendDataToFile( self.CheerLogFile, text )
return | [
"def",
"write_CheerDataLog",
"(",
"self",
",",
"cheerAmount",
"=",
"0",
",",
"userDisplayName",
"=",
"\"\"",
")",
":",
"thisActionName",
"=",
"\"write_CheerDataLog\"",
"tmpText",
"=",
"\"User = {0} ( Bits: {1} )\"",
".",
"format",
"(",
"userDisplayName",
",",
"str",
"(",
"cheerAmount",
")",
")",
"text",
"=",
"str",
"(",
"'['",
"+",
"myTime",
".",
"TimeStampLog",
"(",
")",
"+",
"'] : '",
"+",
"str",
"(",
"tmpText",
")",
")",
"# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde\r",
"if",
"self",
".",
"LogFilesPath",
":",
"AppendDataToFile",
"(",
"self",
".",
"CheerLogFile",
",",
"text",
")",
"return"
] | [
176,
4
] | [
187,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Train.load_and_preprocess_images | (self) | Laedt die Bilder, skaliert, normalisiert und speichert RGB Werte und Label in die jeweiligen Arrays | Laedt die Bilder, skaliert, normalisiert und speichert RGB Werte und Label in die jeweiligen Arrays | def load_and_preprocess_images(self):
"""Laedt die Bilder, skaliert, normalisiert und speichert RGB Werte und Label in die jeweiligen Arrays"""
print('[INFO] loading images...')
data_path = sorted(list(paths.list_images(self.PATH)))
random.seed(54)
random.shuffle(data_path)
data = []
labels_categories = []
labels_colors = []
for data_folder_path in data_path:
image = cv2.imread(data_folder_path)
image = cv2.resize(image, (self.INPUT_SHAPE[0], self.INPUT_SHAPE[1]))
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image = img_to_array(image)
data.append(image)
(color, category) = data_folder_path.split(os.path.sep)[-2].split("_")
labels_categories.append(category)
labels_colors.append(color)
self.data = np.array(data, dtype="float") / 255.0
print("[INFO] data matrix: {} images ({:.2f}MB)".format(
len(data_path), self.data.nbytes / (1024 * 1000.0)))
labels_categories = np.array(labels_categories)
labels_colors = np.array(labels_colors)
print("[INFO] binarizing labels...")
LB_category = LabelBinarizer()
LB_color = LabelBinarizer()
self.labels_categories = LB_category.fit_transform(labels_categories)
self.labels_colors = LB_color.fit_transform(labels_colors)
split = train_test_split(self.data, self.labels_categories, self.labels_colors, test_size=0.2, random_state=54)
(self.x_train, self.x_test, self.y_train_category, self.y_test_category, self.y_train_color, self.y_test_color) = split | [
"def",
"load_and_preprocess_images",
"(",
"self",
")",
":",
"print",
"(",
"'[INFO] loading images...'",
")",
"data_path",
"=",
"sorted",
"(",
"list",
"(",
"paths",
".",
"list_images",
"(",
"self",
".",
"PATH",
")",
")",
")",
"random",
".",
"seed",
"(",
"54",
")",
"random",
".",
"shuffle",
"(",
"data_path",
")",
"data",
"=",
"[",
"]",
"labels_categories",
"=",
"[",
"]",
"labels_colors",
"=",
"[",
"]",
"for",
"data_folder_path",
"in",
"data_path",
":",
"image",
"=",
"cv2",
".",
"imread",
"(",
"data_folder_path",
")",
"image",
"=",
"cv2",
".",
"resize",
"(",
"image",
",",
"(",
"self",
".",
"INPUT_SHAPE",
"[",
"0",
"]",
",",
"self",
".",
"INPUT_SHAPE",
"[",
"1",
"]",
")",
")",
"image",
"=",
"cv2",
".",
"cvtColor",
"(",
"image",
",",
"cv2",
".",
"COLOR_BGR2RGB",
")",
"image",
"=",
"img_to_array",
"(",
"image",
")",
"data",
".",
"append",
"(",
"image",
")",
"(",
"color",
",",
"category",
")",
"=",
"data_folder_path",
".",
"split",
"(",
"os",
".",
"path",
".",
"sep",
")",
"[",
"-",
"2",
"]",
".",
"split",
"(",
"\"_\"",
")",
"labels_categories",
".",
"append",
"(",
"category",
")",
"labels_colors",
".",
"append",
"(",
"color",
")",
"self",
".",
"data",
"=",
"np",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"\"float\"",
")",
"/",
"255.0",
"print",
"(",
"\"[INFO] data matrix: {} images ({:.2f}MB)\"",
".",
"format",
"(",
"len",
"(",
"data_path",
")",
",",
"self",
".",
"data",
".",
"nbytes",
"/",
"(",
"1024",
"*",
"1000.0",
")",
")",
")",
"labels_categories",
"=",
"np",
".",
"array",
"(",
"labels_categories",
")",
"labels_colors",
"=",
"np",
".",
"array",
"(",
"labels_colors",
")",
"print",
"(",
"\"[INFO] binarizing labels...\"",
")",
"LB_category",
"=",
"LabelBinarizer",
"(",
")",
"LB_color",
"=",
"LabelBinarizer",
"(",
")",
"self",
".",
"labels_categories",
"=",
"LB_category",
".",
"fit_transform",
"(",
"labels_categories",
")",
"self",
".",
"labels_colors",
"=",
"LB_color",
".",
"fit_transform",
"(",
"labels_colors",
")",
"split",
"=",
"train_test_split",
"(",
"self",
".",
"data",
",",
"self",
".",
"labels_categories",
",",
"self",
".",
"labels_colors",
",",
"test_size",
"=",
"0.2",
",",
"random_state",
"=",
"54",
")",
"(",
"self",
".",
"x_train",
",",
"self",
".",
"x_test",
",",
"self",
".",
"y_train_category",
",",
"self",
".",
"y_test_category",
",",
"self",
".",
"y_train_color",
",",
"self",
".",
"y_test_color",
")",
"=",
"split"
] | [
35,
4
] | [
67,
127
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
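The tail of `load_and_preprocess_images` above binarises two label sets and splits the data plus both label arrays in one `train_test_split` call. A toy-sized sketch of that part with random stand-in data; the 96x96x3 shape and the label values are assumptions:

```python
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer

# toy stand-ins for the image tensor and the two label sets in the record above
data = np.random.rand(10, 96, 96, 3).astype("float32")
categories = np.array(["shirt", "jeans", "shirt", "dress", "jeans",
                       "dress", "shirt", "jeans", "dress", "shirt"])
colors = np.array(["red", "blue", "red", "black", "blue",
                   "black", "red", "blue", "black", "red"])

cat_bin = LabelBinarizer().fit(categories)
col_bin = LabelBinarizer().fit(colors)

split = train_test_split(data,
                         cat_bin.transform(categories),
                         col_bin.transform(colors),
                         test_size=0.2, random_state=54)
(x_train, x_test,
 y_train_cat, y_test_cat,
 y_train_col, y_test_col) = split

print(x_train.shape, y_train_cat.shape, y_train_col.shape)  # (8, 96, 96, 3) (8, 3) (8, 3)
```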
|
CoolDownHelper.GetUserCooldownDuration | ( self, scriptname, command, user ) | return seconds | Zeit für Cooldown | Zeit für Cooldown | def GetUserCooldownDuration( self, scriptname, command, user ):
''' Zeit für Cooldown '''
thisActionName = "GetUserCooldownDuration"
seconds = self.Parent.GetUserCooldownDuration( scriptname, command, user )
return seconds | [
"def",
"GetUserCooldownDuration",
"(",
"self",
",",
"scriptname",
",",
"command",
",",
"user",
")",
":",
"thisActionName",
"=",
"\"GetUserCooldownDuration\"",
"seconds",
"=",
"self",
".",
"Parent",
".",
"GetUserCooldownDuration",
"(",
"scriptname",
",",
"command",
",",
"user",
")",
"return",
"seconds"
] | [
88,
4
] | [
93,
22
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
calculate_round | (values: dict, data: StepData) | Rundet gegebene Werte auf eine gewünschte Nachkommastelle.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Rundet gegebene Werte auf eine gewünschte Nachkommastelle. | def calculate_round(values: dict, data: StepData):
"""Rundet gegebene Werte auf eine gewünschte Nachkommastelle.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
new_key = get_new_keys(values, idx)
if values.get("decimal", None):
new_value = round(value, data.get_data(values["decimal"], values, numbers.Number))
else:
new_value = round(value)
data.insert_data(new_key, new_value, values) | [
"def",
"calculate_round",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"if",
"values",
".",
"get",
"(",
"\"decimal\"",
",",
"None",
")",
":",
"new_value",
"=",
"round",
"(",
"value",
",",
"data",
".",
"get_data",
"(",
"values",
"[",
"\"decimal\"",
"]",
",",
"values",
",",
"numbers",
".",
"Number",
")",
")",
"else",
":",
"new_value",
"=",
"round",
"(",
"value",
")",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
123,
0
] | [
137,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
Monat.position | (self) | return self.__monats_position | Die Position des Monats im Jahr (Bei 1 beginnend) | Die Position des Monats im Jahr (Bei 1 beginnend) | def position(self) -> Monatsposition:
"""Die Position des Monats im Jahr (Bei 1 beginnend)"""
return self.__monats_position | [
"def",
"position",
"(",
"self",
")",
"->",
"Monatsposition",
":",
"return",
"self",
".",
"__monats_position"
] | [
60,
4
] | [
62,
37
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
general.whois | (self, ctx, member: discord.Member=None) | Gibt Informationen über einen Benutzer aus
Beispiel:
-----------
:whois @Lou#XXXX
| Gibt Informationen über einen Benutzer aus
Beispiel:
-----------
:whois | async def whois(self, ctx, member: discord.Member=None):
'''Gibt Informationen über einen Benutzer aus
Beispiel:
-----------
:whois @Lou#XXXX
'''
await ctx.message.delete()
if member == None:
member = ctx.author
if member.top_role.is_default():
topRole = 'everyone' #to prevent @everyone spam
topRoleColour = '#000000'
else:
topRole = member.top_role
topRoleColour = member.top_role.colour
if member is not None:
embed = discord.Embed(color=member.top_role.colour)
embed.set_footer(text=f'UserID: {member.id}')
embed.set_thumbnail(url=member.avatar_url)
if member.name != member.display_name:
fullName = f'{member} ({member.display_name})'
else:
fullName = member
embed.add_field(name=member.name, value=fullName, inline=False)
embed.add_field(name='Discord beigetreten am', value='{}\n(Tage seitdem: {})'.format(member.created_at.strftime('%d.%m.%Y'), (datetime.now()-member.created_at).days), inline=True)
embed.add_field(name='Server beigetreten am', value='{}\n(Tage seitdem: {})'.format(member.joined_at.strftime('%d.%m.%Y'), (datetime.now()-member.joined_at).days), inline=True)
embed.add_field(name='Avatar Link', value=member.avatar_url, inline=False)
embed.add_field(name='Rollen', value=self._getRoles(member.roles), inline=True)
embed.add_field(name='Rollenfarbe', value='{} ({})'.format(topRoleColour, topRole), inline=True)
embed.add_field(name='Status', value=member.status, inline=True)
await ctx.send(embed=embed)
else:
msg = ':no_entry: Du hast keinen Benutzer angegeben!'
await ctx.send(msg) | [
"async",
"def",
"whois",
"(",
"self",
",",
"ctx",
",",
"member",
":",
"discord",
".",
"Member",
"=",
"None",
")",
":",
"await",
"ctx",
".",
"message",
".",
"delete",
"(",
")",
"if",
"member",
"==",
"None",
":",
"member",
"=",
"ctx",
".",
"author",
"if",
"member",
".",
"top_role",
".",
"is_default",
"(",
")",
":",
"topRole",
"=",
"'everyone'",
"#to prevent @everyone spam",
"topRoleColour",
"=",
"'#000000'",
"else",
":",
"topRole",
"=",
"member",
".",
"top_role",
"topRoleColour",
"=",
"member",
".",
"top_role",
".",
"colour",
"if",
"member",
"is",
"not",
"None",
":",
"embed",
"=",
"discord",
".",
"Embed",
"(",
"color",
"=",
"member",
".",
"top_role",
".",
"colour",
")",
"embed",
".",
"set_footer",
"(",
"text",
"=",
"f'UserID: {member.id}'",
")",
"embed",
".",
"set_thumbnail",
"(",
"url",
"=",
"member",
".",
"avatar_url",
")",
"if",
"member",
".",
"name",
"!=",
"member",
".",
"display_name",
":",
"fullName",
"=",
"f'{member} ({member.display_name})'",
"else",
":",
"fullName",
"=",
"member",
"embed",
".",
"add_field",
"(",
"name",
"=",
"member",
".",
"name",
",",
"value",
"=",
"fullName",
",",
"inline",
"=",
"False",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Discord beigetreten am'",
",",
"value",
"=",
"'{}\\n(Tage seitdem: {})'",
".",
"format",
"(",
"member",
".",
"created_at",
".",
"strftime",
"(",
"'%d.%m.%Y'",
")",
",",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"member",
".",
"created_at",
")",
".",
"days",
")",
",",
"inline",
"=",
"True",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Server beigetreten am'",
",",
"value",
"=",
"'{}\\n(Tage seitdem: {})'",
".",
"format",
"(",
"member",
".",
"joined_at",
".",
"strftime",
"(",
"'%d.%m.%Y'",
")",
",",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"member",
".",
"joined_at",
")",
".",
"days",
")",
",",
"inline",
"=",
"True",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Avatar Link'",
",",
"value",
"=",
"member",
".",
"avatar_url",
",",
"inline",
"=",
"False",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Rollen'",
",",
"value",
"=",
"self",
".",
"_getRoles",
"(",
"member",
".",
"roles",
")",
",",
"inline",
"=",
"True",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Rollenfarbe'",
",",
"value",
"=",
"'{} ({})'",
".",
"format",
"(",
"topRoleColour",
",",
"topRole",
")",
",",
"inline",
"=",
"True",
")",
"embed",
".",
"add_field",
"(",
"name",
"=",
"'Status'",
",",
"value",
"=",
"member",
".",
"status",
",",
"inline",
"=",
"True",
")",
"await",
"ctx",
".",
"send",
"(",
"embed",
"=",
"embed",
")",
"else",
":",
"msg",
"=",
"':no_entry: Du hast keinen Benutzer angegeben!'",
"await",
"ctx",
".",
"send",
"(",
"msg",
")"
] | [
227,
4
] | [
262,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
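The `whois` row above builds its embed fields by formatting a date together with the number of days elapsed since then. A minimal, dependency-free sketch of just that formatting step is shown below; `days_since` is an illustrative helper name, not part of the original cog, and the discord.py bot/cog wiring is deliberately left out.

```python
from datetime import datetime

def days_since(when, now=None):
    # Formats a date the way the embed fields above do:
    # "17.05.2020\n(Tage seitdem: 1234)"
    now = now or datetime.now()
    return "{}\n(Tage seitdem: {})".format(when.strftime("%d.%m.%Y"), (now - when).days)

if __name__ == "__main__":
    print(days_since(datetime(2020, 5, 17)))
```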
ariaDicomClass.initResultsPath | (self, AcquisitionYear=None ) | return dirname | Den Ablegeort zu den PDF Dateien bestimmen
in variables.path befindet sich jetzt der resultsPath ggf. mit angehängten AcquisitionYear
Parameters
----------
AcquisitionYear : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
dirname : str
der aktuelle PDF Pfad (auch in self.variables["path"] )
| Den Ablegeort zu den PDF Dateien bestimmen
in variables.path befindet sich jetzt der resultsPath ggf. mit angehängten AcquisitionYear | def initResultsPath(self, AcquisitionYear=None ):
'''Den Ablegeort zu den PDF Dateien bestimmen
in variables.path befindet sich jetzt der resultsPath ggf. mit angehängten AcquisitionYear
Parameters
----------
AcquisitionYear : TYPE, optional
DESCRIPTION. The default is None.
Returns
-------
dirname : str
der aktuelle PDF Pfad (auch in self.variables["path"] )
'''
paths = [ ]
# ist der Pfad relativ angegeben ab base path verwenden
if self.config["resultsPath"][0] == ".":
paths.append( self.config["BASE_DIR"] )
paths.append( self.config["resultsPath"] )
else:
paths.append( self.config["resultsPath"] )
# zusätzlich noch das AcquisitionYear anfügen
if AcquisitionYear:
paths.append( str(AcquisitionYear) )
# den Pfad in variables["path"] ablegen
dirname = osp.abspath( osp.join( *paths ) )
self.variables["path"] = dirname
return dirname | [
"def",
"initResultsPath",
"(",
"self",
",",
"AcquisitionYear",
"=",
"None",
")",
":",
"paths",
"=",
"[",
"]",
"# ist der Pfad relativ angegeben ab base path verwenden",
"if",
"self",
".",
"config",
"[",
"\"resultsPath\"",
"]",
"[",
"0",
"]",
"==",
"\".\"",
":",
"paths",
".",
"append",
"(",
"self",
".",
"config",
"[",
"\"BASE_DIR\"",
"]",
")",
"paths",
".",
"append",
"(",
"self",
".",
"config",
"[",
"\"resultsPath\"",
"]",
")",
"else",
":",
"paths",
".",
"append",
"(",
"self",
".",
"config",
"[",
"\"resultsPath\"",
"]",
")",
"# zusätzlich noch das AcquisitionYear anfügen",
"if",
"AcquisitionYear",
":",
"paths",
".",
"append",
"(",
"str",
"(",
"AcquisitionYear",
")",
")",
"# den Pfad in variables[\"path\"] ablegen",
"dirname",
"=",
"osp",
".",
"abspath",
"(",
"osp",
".",
"join",
"(",
"*",
"paths",
")",
")",
"self",
".",
"variables",
"[",
"\"path\"",
"]",
"=",
"dirname",
"return",
"dirname"
] | [
83,
4
] | [
115,
22
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
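The path logic in `initResultsPath` can be exercised on its own. The sketch below is an illustrative re-implementation under the assumption that a `resultsPath` beginning with `.` is meant to be relative to `BASE_DIR`; the function and argument names are placeholders, not the project's API.

```python
import os.path as osp

def resolve_results_path(base_dir, results_path, acquisition_year=None):
    # Relative results paths (leading ".") are anchored at base_dir,
    # absolute ones are used as-is; an optional year becomes a subdirectory.
    parts = [base_dir, results_path] if results_path.startswith(".") else [results_path]
    if acquisition_year:
        parts.append(str(acquisition_year))
    return osp.abspath(osp.join(*parts))

print(resolve_results_path("/srv/app", "./results", 2021))  # /srv/app/results/2021
print(resolve_results_path("/srv/app", "/data/pdf"))        # /data/pdf
```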
Holidays.get_three_kings | (self, state_code) | Heilige Drei Könige | Heilige Drei Könige | def get_three_kings(self, state_code):
""" Heilige Drei Könige """
valid = ['BY', 'BW', 'ST']
if state_code in valid:
three_kings = datetime.date(self.year, 1, 6)
self.holiday_list.append([three_kings, u'Heilige Drei Könige']) | [
"def",
"get_three_kings",
"(",
"self",
",",
"state_code",
")",
":",
"valid",
"=",
"[",
"'BY'",
",",
"'BW'",
",",
"'ST'",
"]",
"if",
"state_code",
"in",
"valid",
":",
"three_kings",
"=",
"datetime",
".",
"date",
"(",
"self",
".",
"year",
",",
"1",
",",
"6",
")",
"self",
".",
"holiday_list",
".",
"append",
"(",
"[",
"three_kings",
",",
"u'Heilige Drei Könige']",
")",
""
] | [
152,
4
] | [
157,
76
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
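The holiday entry above is a fixed-date rule gated on the federal state. A self-contained version of the same check, returning the value instead of appending to an internal list, might look like this (the English comments and the return-value style are editorial choices, not the original class design):

```python
import datetime

def three_kings(year, state_code):
    # Heilige Drei Koenige (Epiphany, Jan 6) is a public holiday
    # only in BY, BW and ST.
    if state_code in {"BY", "BW", "ST"}:
        return datetime.date(year, 1, 6), "Heilige Drei Könige"
    return None

print(three_kings(2024, "BY"))  # (datetime.date(2024, 1, 6), 'Heilige Drei Könige')
print(three_kings(2024, "NW"))  # None
```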
Geometry.copy_arc | (self, center, radius, start_angle, end_angle,
start_line, end_line, inner_circle, outer_circle, e,
rtol=1e-04,
atol=1e-04,
points_inner=None,
points_outer=None) | return new_elements | Die Funktion kopiert die Teile eines Kreissegments, welche sich in der
durch die Parameter definierten Teilkreisfläche befinden.
| Die Funktion kopiert die Teile eines Kreissegments, welche sich in der
durch die Parameter definierten Teilkreisfläche befinden.
| def copy_arc(self, center, radius, start_angle, end_angle,
start_line, end_line, inner_circle, outer_circle, e,
rtol=1e-04,
atol=1e-04,
points_inner=None,
points_outer=None):
""" Die Funktion kopiert die Teile eines Kreissegments, welche sich in der
durch die Parameter definierten Teilkreisfläche befinden.
"""
assert(isinstance(e, Arc))
if is_same_angle(start_angle, end_angle):
pts_inner = inner_circle.intersect_arc(e,
rtol,
atol,
False)
pts_outer = outer_circle.intersect_arc(e,
rtol,
atol,
False)
points = pts_inner + pts_outer + [e.p2]
else:
pts_start = e.intersect_line(start_line,
rtol,
atol,
False)
pts_end = e.intersect_line(end_line,
rtol,
atol,
False)
pts_inner = inner_circle.intersect_arc(e,
rtol,
atol,
False)
pts_outer = outer_circle.intersect_arc(e,
rtol,
atol,
False)
points = pts_start + pts_end + \
pts_inner + pts_outer + [e.p2]
if points_inner is not None and pts_inner:
points_inner += pts_inner
if points_outer is not None and pts_outer:
points_outer += pts_outer
new_elements = []
sorted_points = []
alpha_start = alpha_line(e.center, e.p1)
for p in points:
alpha_next = alpha_line(e.center, p)
if less_equal(alpha_next, alpha_start):
alpha_next += 2*np.pi
sorted_points.append((alpha_next, p))
alpha_start = alpha_next
sorted_points.sort()
p1 = e.p1
alpha_start = alpha_line(e.center, e.p1)
for x, p2 in sorted_points:
alpha_end = alpha_line(e.center, p2)
pm = middle_point_of_arc(e.center, e.radius, p1, p2, rtol=rtol)
if is_point_inside_region(pm, center,
inner_circle.radius, outer_circle.radius,
start_angle, end_angle):
if not (len(points) > 1 and
points_are_close(p1, p2, 1e-3, 1e-3)):
if len(points) == 1 and e.rtheta is not None:
a = Arc(Element(center=e.center,
radius=e.radius,
start_angle=alpha_start*180/np.pi,
end_angle=alpha_end*180/np.pi,
width=e.width,
height=e.height,
rtheta=e.rtheta,
start_param=e.start_param,
end_param=e.end_param))
else:
a = Arc(Element(center=e.center,
radius=e.radius,
start_angle=alpha_start*180/np.pi,
end_angle=alpha_end*180/np.pi))
new_elements.append(a)
alpha_start = alpha_end
p1 = p2
return new_elements | [
"def",
"copy_arc",
"(",
"self",
",",
"center",
",",
"radius",
",",
"start_angle",
",",
"end_angle",
",",
"start_line",
",",
"end_line",
",",
"inner_circle",
",",
"outer_circle",
",",
"e",
",",
"rtol",
"=",
"1e-04",
",",
"atol",
"=",
"1e-04",
",",
"points_inner",
"=",
"None",
",",
"points_outer",
"=",
"None",
")",
":",
"assert",
"(",
"isinstance",
"(",
"e",
",",
"Arc",
")",
")",
"if",
"is_same_angle",
"(",
"start_angle",
",",
"end_angle",
")",
":",
"pts_inner",
"=",
"inner_circle",
".",
"intersect_arc",
"(",
"e",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"pts_outer",
"=",
"outer_circle",
".",
"intersect_arc",
"(",
"e",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"points",
"=",
"pts_inner",
"+",
"pts_outer",
"+",
"[",
"e",
".",
"p2",
"]",
"else",
":",
"pts_start",
"=",
"e",
".",
"intersect_line",
"(",
"start_line",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"pts_end",
"=",
"e",
".",
"intersect_line",
"(",
"end_line",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"pts_inner",
"=",
"inner_circle",
".",
"intersect_arc",
"(",
"e",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"pts_outer",
"=",
"outer_circle",
".",
"intersect_arc",
"(",
"e",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"points",
"=",
"pts_start",
"+",
"pts_end",
"+",
"pts_inner",
"+",
"pts_outer",
"+",
"[",
"e",
".",
"p2",
"]",
"if",
"points_inner",
"is",
"not",
"None",
"and",
"pts_inner",
":",
"points_inner",
"+=",
"pts_inner",
"if",
"points_outer",
"is",
"not",
"None",
"and",
"pts_outer",
":",
"points_outer",
"+=",
"pts_outer",
"new_elements",
"=",
"[",
"]",
"sorted_points",
"=",
"[",
"]",
"alpha_start",
"=",
"alpha_line",
"(",
"e",
".",
"center",
",",
"e",
".",
"p1",
")",
"for",
"p",
"in",
"points",
":",
"alpha_next",
"=",
"alpha_line",
"(",
"e",
".",
"center",
",",
"p",
")",
"if",
"less_equal",
"(",
"alpha_next",
",",
"alpha_start",
")",
":",
"alpha_next",
"+=",
"2",
"*",
"np",
".",
"pi",
"sorted_points",
".",
"append",
"(",
"(",
"alpha_next",
",",
"p",
")",
")",
"alpha_start",
"=",
"alpha_next",
"sorted_points",
".",
"sort",
"(",
")",
"p1",
"=",
"e",
".",
"p1",
"alpha_start",
"=",
"alpha_line",
"(",
"e",
".",
"center",
",",
"e",
".",
"p1",
")",
"for",
"x",
",",
"p2",
"in",
"sorted_points",
":",
"alpha_end",
"=",
"alpha_line",
"(",
"e",
".",
"center",
",",
"p2",
")",
"pm",
"=",
"middle_point_of_arc",
"(",
"e",
".",
"center",
",",
"e",
".",
"radius",
",",
"p1",
",",
"p2",
",",
"rtol",
"=",
"rtol",
")",
"if",
"is_point_inside_region",
"(",
"pm",
",",
"center",
",",
"inner_circle",
".",
"radius",
",",
"outer_circle",
".",
"radius",
",",
"start_angle",
",",
"end_angle",
")",
":",
"if",
"not",
"(",
"len",
"(",
"points",
")",
">",
"1",
"and",
"points_are_close",
"(",
"p1",
",",
"p2",
",",
"1e-3",
",",
"1e-3",
")",
")",
":",
"if",
"len",
"(",
"points",
")",
"==",
"1",
"and",
"e",
".",
"rtheta",
"is",
"not",
"None",
":",
"a",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"e",
".",
"center",
",",
"radius",
"=",
"e",
".",
"radius",
",",
"start_angle",
"=",
"alpha_start",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"alpha_end",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"width",
"=",
"e",
".",
"width",
",",
"height",
"=",
"e",
".",
"height",
",",
"rtheta",
"=",
"e",
".",
"rtheta",
",",
"start_param",
"=",
"e",
".",
"start_param",
",",
"end_param",
"=",
"e",
".",
"end_param",
")",
")",
"else",
":",
"a",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"e",
".",
"center",
",",
"radius",
"=",
"e",
".",
"radius",
",",
"start_angle",
"=",
"alpha_start",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"alpha_end",
"*",
"180",
"/",
"np",
".",
"pi",
")",
")",
"new_elements",
".",
"append",
"(",
"a",
")",
"alpha_start",
"=",
"alpha_end",
"p1",
"=",
"p2",
"return",
"new_elements"
] | [
1626,
4
] | [
1714,
27
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
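The heart of `copy_arc` is the test whether the midpoint of a candidate sub-arc lies inside the annular sector spanned by the two radii and the two boundary angles. The snippet below is a simplified stand-in for that `is_point_inside_region` check, written with plain `math`; it ignores the tolerance handling and the full-circle special case that the original treats separately via `is_same_angle`.

```python
import math

def inside_annular_sector(p, center, r_inner, r_outer, start_angle, end_angle):
    # True if p lies between the two radii and within the sector
    # swept counter-clockwise from start_angle to end_angle (radians).
    dx, dy = p[0] - center[0], p[1] - center[1]
    r = math.hypot(dx, dy)
    if not (r_inner <= r <= r_outer):
        return False
    alpha = math.atan2(dy, dx) % (2 * math.pi)
    span = (end_angle - start_angle) % (2 * math.pi)
    return (alpha - start_angle) % (2 * math.pi) <= span

print(inside_annular_sector((1.0, 1.0), (0.0, 0.0), 1.0, 2.0, 0.0, math.pi / 2))   # True
print(inside_annular_sector((1.0, -1.0), (0.0, 0.0), 1.0, 2.0, 0.0, math.pi / 2))  # False
```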
to_dict | (values: dict, data: StepData) | Wandelt eine Liste aus Tupeln oder Arrays in ein Dictionary um.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
| Wandelt eine Liste aus Tupeln oder Arrays in ein Dictionary um. | def to_dict(values: dict, data: StepData):
"""Wandelt eine Liste aus Tupeln oder Arrays in ein Dictionary um.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
new_key = get_new_keys(values, idx)
new_value = dict(value)
data.insert_data(new_key, new_value, values) | [
"def",
"to_dict",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"new_value",
"=",
"dict",
"(",
"value",
")",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
585,
0
] | [
597,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
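The conversion the `to_dict` step performs per key is simply Python's `dict()` constructor applied to a sequence of pairs; the `StepData` plumbing (`loop_key`, `insert_data`) only decides where the result is stored. For illustration:

```python
pairs = [("max_temp", 23.4), ("min_temp", 11.2)]   # e.g. a list of tuples from the API
as_dict = dict(pairs)
print(as_dict)   # {'max_temp': 23.4, 'min_temp': 11.2}
```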
custom | (values: dict, step_data: StepData, out_images, out_audios, out_audio_l) | Generierung des Output-Videos aus ausgewählten Bild- und Audiodateien.
Generiert das Output-Video. In values (in der JSON) muss angegeben sein in welcher Reihenfolge und wie lange jedes Bild
und die passenden Audiodatei aneinandergereiht werden sollen.
:param values: Werte aus der JSON-Datei
:param step_data: Daten aus der API
:return: Pfad zum Output-Video
:rtype: str
| Generierung des Output-Videos aus ausgewählten Bild- und Audiodateien. | def custom(values: dict, step_data: StepData, out_images, out_audios, out_audio_l):
"""Generierung des Output-Videos aus ausgewählten Bild- und Audiodateien.
Generiert das Output-Video. In values (in der JSON) muss angegeben sein in welcher Reihenfolge und wie lange jedes Bild
und die passenden Audiodatei aneinandergereiht werden sollen.
:param values: Werte aus der JSON-Datei
:param step_data: Daten aus der API
:return: Pfad zum Output-Video
:rtype: str
"""
for s in values["sequence"]["pattern"]:
out_images.append(values["images"][step_data.format(s["image"])])
if s.get("audio_l", None) is None:
out_audio_l.append(step_data.get_data(s.get("time_diff", 0), None, numbers.Number))
else:
out_audios.append(values["audio"]["audios"][step_data.format(s["audio_l"])])
out_audio_l.append(step_data.get_data(s.get("time_diff", 0), None, numbers.Number) + MP3(
values["audio"]["audios"][step_data.format(s["audio_l"])]).info.length) | [
"def",
"custom",
"(",
"values",
":",
"dict",
",",
"step_data",
":",
"StepData",
",",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
")",
":",
"for",
"s",
"in",
"values",
"[",
"\"sequence\"",
"]",
"[",
"\"pattern\"",
"]",
":",
"out_images",
".",
"append",
"(",
"values",
"[",
"\"images\"",
"]",
"[",
"step_data",
".",
"format",
"(",
"s",
"[",
"\"image\"",
"]",
")",
"]",
")",
"if",
"s",
".",
"get",
"(",
"\"audio_l\"",
",",
"None",
")",
"is",
"None",
":",
"out_audio_l",
".",
"append",
"(",
"step_data",
".",
"get_data",
"(",
"s",
".",
"get",
"(",
"\"time_diff\"",
",",
"0",
")",
",",
"None",
",",
"numbers",
".",
"Number",
")",
")",
"else",
":",
"out_audios",
".",
"append",
"(",
"values",
"[",
"\"audio\"",
"]",
"[",
"\"audios\"",
"]",
"[",
"step_data",
".",
"format",
"(",
"s",
"[",
"\"audio_l\"",
"]",
")",
"]",
")",
"out_audio_l",
".",
"append",
"(",
"step_data",
".",
"get_data",
"(",
"s",
".",
"get",
"(",
"\"time_diff\"",
",",
"0",
")",
",",
"None",
",",
"numbers",
".",
"Number",
")",
"+",
"MP3",
"(",
"values",
"[",
"\"audio\"",
"]",
"[",
"\"audios\"",
"]",
"[",
"step_data",
".",
"format",
"(",
"s",
"[",
"\"audio_l\"",
"]",
")",
"]",
")",
".",
"info",
".",
"length",
")"
] | [
110,
0
] | [
128,
87
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
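The `custom` sequencing step decides, per pattern entry, how long an image stays on screen: either a bare `time_diff`, or `time_diff` plus the duration of the attached MP3 (read via mutagen). The sketch below restates that loop without the `StepData` formatting layer; the dictionary keys mirror the JSON fields above, but the function name and signature are illustrative.

```python
from mutagen.mp3 import MP3  # pip install mutagen

def build_sequence(pattern, images, audios):
    out_images, out_audios, out_lengths = [], [], []
    for step in pattern:
        out_images.append(images[step["image"]])
        gap = step.get("time_diff", 0)
        if step.get("audio_l") is None:
            # image without narration: show it for the configured gap only
            out_lengths.append(gap)
        else:
            # image with narration: add the MP3 duration to the gap
            path = audios[step["audio_l"]]
            out_audios.append(path)
            out_lengths.append(gap + MP3(path).info.length)
    return out_images, out_audios, out_lengths
```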
UNet64_output_expansed | (input_shape) | return model | gleich wie UNet64, nur am output ist ein 3x3 Kernel eingesetzt. | gleich wie UNet64, nur am output ist ein 3x3 Kernel eingesetzt. | def UNet64_output_expansed(input_shape):
"""gleich wie UNet64, nur am output ist ein 3x3 Kernel eingesetzt."""
inputs = Input(shape=input_shape)
conv01 = Conv2D(10, kernel_size=(3, 3), padding="same")(inputs) # 10 x 64x64
conv01 = Activation('relu')(conv01)
conv01_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv01) # 10 x 32x32
print("0)", conv01_pool.shape, "10 x 32x32")
conv02 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv01_pool) # 20 x 32x32
conv02 = Activation('relu')(conv02)
conv02_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv02) # 20 x 16x16
print("1)", conv02_pool.shape, "20 x 16x16")
conv03 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv02_pool) # 20 x 16x16
conv03 = Activation('relu')(conv03)
conv03_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv03) # 20 x 8x8
print("2)", conv03_pool.shape, "20 x 8x8")
conv04 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv03_pool) # 20 x 8x8
conv04 = Activation('relu')(conv04)
conv04_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv04) # 20 x 4x4
print("3)", conv04_pool.shape, "20 x 4x4")
### UPSAMPLING:
up04 = UpSampling2D((2, 2))(conv04_pool) # 20 x 8x8
up04 = concatenate([conv04, up04], axis=3) # 20+20 x 8x8
print("4)", up04.shape, "40 x 8x8")
up03 = UpSampling2D((2, 2))(up04) # 40 x 16x16
up03 = concatenate([conv03, up03], axis=3) # 20+40 x 16x16
print("5)", up03.shape, "60 x 16x16")
up02 = UpSampling2D((2, 2))(up03) # 60 x 32x32
up02 = concatenate([conv02, up02], axis=3) # 20+60 x 32x32
print("6)", up02.shape, "80 x 32x32")
up01 = UpSampling2D((2, 2))(up02) # 80 x 64x64
up01 = concatenate([conv01, up01], axis=3) # 10+80 x 64x64
print("7)", up01.shape, "90 x 64x64")
output = Conv2D(1, (3, 3), activation='relu', padding="same")(up01) # 1 x 64x64
print("8)", output.shape, "1 x 64x64")
output = Flatten()(output)
model = Model(inputs=inputs, outputs=output)
model.compile(loss="mean_squared_error", optimizer='adam')
return model | [
"def",
"UNet64_output_expansed",
"(",
"input_shape",
")",
":",
"inputs",
"=",
"Input",
"(",
"shape",
"=",
"input_shape",
")",
"conv01",
"=",
"Conv2D",
"(",
"10",
",",
"kernel_size",
"=",
"(",
"3",
",",
"3",
")",
",",
"padding",
"=",
"\"same\"",
")",
"(",
"inputs",
")",
"# 10 x 64x64",
"conv01",
"=",
"Activation",
"(",
"'relu'",
")",
"(",
"conv01",
")",
"conv01_pool",
"=",
"MaxPooling2D",
"(",
"(",
"2",
",",
"2",
")",
",",
"strides",
"=",
"(",
"2",
",",
"2",
")",
")",
"(",
"conv01",
")",
"# 10 x 32x32",
"print",
"(",
"\"0)\"",
",",
"conv01_pool",
".",
"shape",
",",
"\"10 x 32x32\"",
")",
"conv02",
"=",
"Conv2D",
"(",
"20",
",",
"kernel_size",
"=",
"(",
"3",
",",
"3",
")",
",",
"padding",
"=",
"\"same\"",
")",
"(",
"conv01_pool",
")",
"# 20 x 32x32",
"conv02",
"=",
"Activation",
"(",
"'relu'",
")",
"(",
"conv02",
")",
"conv02_pool",
"=",
"MaxPooling2D",
"(",
"(",
"2",
",",
"2",
")",
",",
"strides",
"=",
"(",
"2",
",",
"2",
")",
")",
"(",
"conv02",
")",
"# 20 x 16x16",
"print",
"(",
"\"1)\"",
",",
"conv02_pool",
".",
"shape",
",",
"\"20 x 16x16\"",
")",
"conv03",
"=",
"Conv2D",
"(",
"20",
",",
"kernel_size",
"=",
"(",
"3",
",",
"3",
")",
",",
"padding",
"=",
"\"same\"",
")",
"(",
"conv02_pool",
")",
"# 20 x 16x16",
"conv03",
"=",
"Activation",
"(",
"'relu'",
")",
"(",
"conv03",
")",
"conv03_pool",
"=",
"MaxPooling2D",
"(",
"(",
"2",
",",
"2",
")",
",",
"strides",
"=",
"(",
"2",
",",
"2",
")",
")",
"(",
"conv03",
")",
"# 20 x 8x8",
"print",
"(",
"\"2)\"",
",",
"conv03_pool",
".",
"shape",
",",
"\"20 x 8x8\"",
")",
"conv04",
"=",
"Conv2D",
"(",
"20",
",",
"kernel_size",
"=",
"(",
"3",
",",
"3",
")",
",",
"padding",
"=",
"\"same\"",
")",
"(",
"conv03_pool",
")",
"# 20 x 8x8",
"conv04",
"=",
"Activation",
"(",
"'relu'",
")",
"(",
"conv04",
")",
"conv04_pool",
"=",
"MaxPooling2D",
"(",
"(",
"2",
",",
"2",
")",
",",
"strides",
"=",
"(",
"2",
",",
"2",
")",
")",
"(",
"conv04",
")",
"# 20 x 4x4",
"print",
"(",
"\"3)\"",
",",
"conv04_pool",
".",
"shape",
",",
"\"20 x 4x4\"",
")",
"### UPSAMPLING:",
"up04",
"=",
"UpSampling2D",
"(",
"(",
"2",
",",
"2",
")",
")",
"(",
"conv04_pool",
")",
"# 20 x 8x8",
"up04",
"=",
"concatenate",
"(",
"[",
"conv04",
",",
"up04",
"]",
",",
"axis",
"=",
"3",
")",
"# 20+20 x 8x8",
"print",
"(",
"\"4)\"",
",",
"up04",
".",
"shape",
",",
"\"40 x 8x8\"",
")",
"up03",
"=",
"UpSampling2D",
"(",
"(",
"2",
",",
"2",
")",
")",
"(",
"up04",
")",
"# 40 x 16x16",
"up03",
"=",
"concatenate",
"(",
"[",
"conv03",
",",
"up03",
"]",
",",
"axis",
"=",
"3",
")",
"# 20+40 x 16x16",
"print",
"(",
"\"5)\"",
",",
"up03",
".",
"shape",
",",
"\"60 x 16x16\"",
")",
"up02",
"=",
"UpSampling2D",
"(",
"(",
"2",
",",
"2",
")",
")",
"(",
"up03",
")",
"# 60 x 32x32",
"up02",
"=",
"concatenate",
"(",
"[",
"conv02",
",",
"up02",
"]",
",",
"axis",
"=",
"3",
")",
"# 20+60 x 32x32",
"print",
"(",
"\"6)\"",
",",
"up02",
".",
"shape",
",",
"\"80 x 32x32\"",
")",
"up01",
"=",
"UpSampling2D",
"(",
"(",
"2",
",",
"2",
")",
")",
"(",
"up02",
")",
"# 80 x 64x64",
"up01",
"=",
"concatenate",
"(",
"[",
"conv01",
",",
"up01",
"]",
",",
"axis",
"=",
"3",
")",
"# 10+80 x 64x64",
"print",
"(",
"\"7)\"",
",",
"up01",
".",
"shape",
",",
"\"90 x 64x64\"",
")",
"output",
"=",
"Conv2D",
"(",
"1",
",",
"(",
"3",
",",
"3",
")",
",",
"activation",
"=",
"'relu'",
",",
"padding",
"=",
"\"same\"",
")",
"(",
"up01",
")",
"# 1 x 64x64",
"print",
"(",
"\"8)\"",
",",
"output",
".",
"shape",
",",
"\"1 x 64x64\"",
")",
"output",
"=",
"Flatten",
"(",
")",
"(",
"output",
")",
"model",
"=",
"Model",
"(",
"inputs",
"=",
"inputs",
",",
"outputs",
"=",
"output",
")",
"model",
".",
"compile",
"(",
"loss",
"=",
"\"mean_squared_error\"",
",",
"optimizer",
"=",
"'adam'",
")",
"return",
"model"
] | [
129,
0
] | [
175,
16
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
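`UNet64_output_expansed` follows the usual U-Net pattern: convolve and downsample, then upsample and concatenate the matching encoder feature map (the skip connection), ending here in a 3x3 output convolution. A two-level toy version of that pattern, using the same Keras building blocks but far fewer layers, is sketched below; it is meant to show the wiring, not to reproduce the original architecture.

```python
from tensorflow.keras.layers import (Input, Conv2D, MaxPooling2D,
                                     UpSampling2D, concatenate, Flatten)
from tensorflow.keras.models import Model

def tiny_unet(input_shape=(64, 64, 1)):
    inputs = Input(shape=input_shape)
    c1 = Conv2D(10, (3, 3), padding="same", activation="relu")(inputs)  # 64x64
    p1 = MaxPooling2D((2, 2))(c1)                                       # 32x32
    c2 = Conv2D(20, (3, 3), padding="same", activation="relu")(p1)      # 32x32
    u1 = UpSampling2D((2, 2))(c2)                                       # back to 64x64
    u1 = concatenate([c1, u1], axis=3)                                  # skip connection
    out = Conv2D(1, (3, 3), padding="same", activation="relu")(u1)      # 3x3 output conv
    out = Flatten()(out)
    model = Model(inputs=inputs, outputs=out)
    model.compile(loss="mean_squared_error", optimizer="adam")
    return model

tiny_unet().summary()
```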
ToolboxAgenda.set_tags_for_slide | (sld, slideNo) | Meta-Informationen für Slide einstellen | Meta-Informationen für Slide einstellen | def set_tags_for_slide(sld, slideNo):
''' Meta-Informationen für Slide einstellen '''
sld.Tags.Add(TOOLBOX_AGENDA, "1")
sld.Tags.Add(TOOLBOX_AGENDA_SLIDENO, str(slideNo)) | [
"def",
"set_tags_for_slide",
"(",
"sld",
",",
"slideNo",
")",
":",
"sld",
".",
"Tags",
".",
"Add",
"(",
"TOOLBOX_AGENDA",
",",
"\"1\"",
")",
"sld",
".",
"Tags",
".",
"Add",
"(",
"TOOLBOX_AGENDA_SLIDENO",
",",
"str",
"(",
"slideNo",
")",
")"
] | [
1230,
4
] | [
1233,
58
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
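`set_tags_for_slide` relies on the PowerPoint object model's `Tags.Add(name, value)`; the two tag-name constants come from elsewhere in the module and are not shown here. A hedged sketch of the same call via pywin32 on Windows, with placeholder constant values, could look like this:

```python
# Assumes Windows, an installed PowerPoint, pywin32, and an open presentation.
import win32com.client

TOOLBOX_AGENDA = "TOOLBOX_AGENDA"                  # placeholder values; the real
TOOLBOX_AGENDA_SLIDENO = "TOOLBOX_AGENDA_SLIDENO"  # constants are defined elsewhere

app = win32com.client.Dispatch("PowerPoint.Application")
slide = app.ActivePresentation.Slides(1)
slide.Tags.Add(TOOLBOX_AGENDA, "1")             # mark the slide as an agenda slide
slide.Tags.Add(TOOLBOX_AGENDA_SLIDENO, str(1))  # remember its position in the agenda
```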
Train.initialize_model | (self) | Initialisierung des Models | Initialisierung des Models | def initialize_model(self):
"""Initialisierung des Models"""
print("[INFO] initializing model...")
self.model = ImageTaggingCNN.build(self.INPUT_SHAPE)
print("[INFO] compiling model...")
optimizer = Adam(lr=self.LEARNINGRATE, decay=self.LEARNINGRATE / self.EPOCHS)
self.model.compile(optimizer=optimizer, loss=self.LOSSES, loss_weights=self.LOSS_WEIGHTS, metrics=["accuracy"])
self.model.summary() | [
"def",
"initialize_model",
"(",
"self",
")",
":",
"print",
"(",
"\"[INFO] initializing model...\"",
")",
"self",
".",
"model",
"=",
"ImageTaggingCNN",
".",
"build",
"(",
"self",
".",
"INPUT_SHAPE",
")",
"print",
"(",
"\"[INFO] compiling model...\"",
")",
"optimizer",
"=",
"Adam",
"(",
"lr",
"=",
"self",
".",
"LEARNINGRATE",
",",
"decay",
"=",
"self",
".",
"LEARNINGRATE",
"/",
"self",
".",
"EPOCHS",
")",
"self",
".",
"model",
".",
"compile",
"(",
"optimizer",
"=",
"optimizer",
",",
"loss",
"=",
"self",
".",
"LOSSES",
",",
"loss_weights",
"=",
"self",
".",
"LOSS_WEIGHTS",
",",
"metrics",
"=",
"[",
"\"accuracy\"",
"]",
")",
"self",
".",
"model",
".",
"summary",
"(",
")"
] | [
26,
4
] | [
33,
28
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
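`initialize_model` compiles a multi-output Keras model with per-output losses and loss weights. Since `ImageTaggingCNN` and its output names are not shown in this row, the sketch below builds a throwaway two-head model just to demonstrate how `loss`, `loss_weights` and `metrics` are passed; every name in it is a placeholder. The original also passed a learning-rate `decay` to `Adam`, which newer Keras versions expose differently (e.g. via learning-rate schedules), so it is omitted here.

```python
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.optimizers import Adam

inputs = Input(shape=(64, 64, 3))
x = Flatten()(inputs)
category = Dense(5, activation="softmax", name="category_output")(x)
color = Dense(3, activation="softmax", name="color_output")(x)
model = Model(inputs=inputs, outputs=[category, color])

losses = {"category_output": "categorical_crossentropy",
          "color_output": "categorical_crossentropy"}
loss_weights = {"category_output": 1.0, "color_output": 1.0}
model.compile(optimizer=Adam(learning_rate=1e-3),
              loss=losses, loss_weights=loss_weights, metrics=["accuracy"])
model.summary()
```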
new | (values: dict, step_data: StepData) | Erstellt ein neues Bild, welches als Thumbnail für das zu erstellende Video verwendet wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
| Erstellt ein neues Bild, welches als Thumbnail für das zu erstellende Video verwendet wird. | def new(values: dict, step_data: StepData):
"""Erstellt ein neues Bild, welches als Thumbnail für das zu erstellende Video verwendet wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
"""
image_func = get_type_func(values["thumbnail"]["image"], IMAGE_TYPES)
src_file = image_func(values["thumbnail"]["image"], step_data, values["images"])
_copy_and_rename(src_file, values, step_data) | [
"def",
"new",
"(",
"values",
":",
"dict",
",",
"step_data",
":",
"StepData",
")",
":",
"image_func",
"=",
"get_type_func",
"(",
"values",
"[",
"\"thumbnail\"",
"]",
"[",
"\"image\"",
"]",
",",
"IMAGE_TYPES",
")",
"src_file",
"=",
"image_func",
"(",
"values",
"[",
"\"thumbnail\"",
"]",
"[",
"\"image\"",
"]",
",",
"step_data",
",",
"values",
"[",
"\"images\"",
"]",
")",
"_copy_and_rename",
"(",
"src_file",
",",
"values",
",",
"step_data",
")"
] | [
50,
0
] | [
59,
49
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
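The `new` thumbnail step is an instance of a registry-dispatch pattern: `get_type_func` looks up a handler by the `type` field, and the handler returns the source image that gets copied to the thumbnail location. The sketch below reconstructs that pattern with an invented one-entry registry; the real `IMAGE_TYPES` table and `_copy_and_rename` helper are not shown in this row, so everything here is illustrative.

```python
import shutil

def already_created(image_values, step_data, images):
    # Hypothetical handler: the image was generated earlier and is looked up by key.
    return images[image_values["key"]]

IMAGE_TYPES = {"created": already_created}   # invented registry contents

def get_type_func(image_values, registry):
    return registry[image_values["type"]]

def make_thumbnail(values, step_data, out_path):
    image_func = get_type_func(values["thumbnail"]["image"], IMAGE_TYPES)
    src_file = image_func(values["thumbnail"]["image"], step_data, values["images"])
    shutil.copy(src_file, out_path)          # stand-in for _copy_and_rename()
    return out_path
```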