identifier (stringlengths 0-89) | parameters (stringlengths 0-399) | return_statement (stringlengths 0-982, ⌀) | docstring (stringlengths 10-3.04k) | docstring_summary (stringlengths 0-3.04k) | function (stringlengths 13-25.8k) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | argument_list (null) | language (stringclasses, 3 values) | docstring_language (stringclasses, 4 values) | docstring_language_predictions (stringclasses, 4 values) | is_langid_reliable (stringclasses, 2 values) | is_langid_extra_reliable (bool, 1 class) | type (stringclasses, 9 values) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
request_multiple_custom | (values: dict, data: StepData, name: str, save_key, ignore_testing=False) | Fragt unterschiedliche Daten einer API ab.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param name: Testdatei, die geladen werden soll.
:param save_key: Key, unter dem die Daten gespeichert werden.
:param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist.
| Fragt unterschiedliche Daten einer API ab. | def request_multiple_custom(values: dict, data: StepData, name: str, save_key, ignore_testing=False):
"""Fragt unterschiedliche Daten einer API ab.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param name: Testdatei, die geladen werden soll.
:param save_key: Key, unter dem die Daten gespeichert werden.
:param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist.
"""
if data.get_config("testing", False) and not ignore_testing:
return _load_test_data(values, data, name, save_key)
if values.get("use_loop_as_key", False):
data.insert_data(save_key, {}, values)
for idx, key in enumerate(values["steps_value"]):
api_request(values["requests"][idx], data, name, f"{save_key}|{key}", ignore_testing)
else:
data.insert_data(save_key, [None] * len(values["requests"]), values)
for idx, value in enumerate(values["requests"]):
api_request(value, data, name, f"{save_key}|{idx}", ignore_testing) | [
"def",
"request_multiple_custom",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
",",
"name",
":",
"str",
",",
"save_key",
",",
"ignore_testing",
"=",
"False",
")",
":",
"if",
"data",
".",
"get_config",
"(",
"\"testing\"",
",",
"False",
")",
"and",
"not",
"ignore_testing",
":",
"return",
"_load_test_data",
"(",
"values",
",",
"data",
",",
"name",
",",
"save_key",
")",
"if",
"values",
".",
"get",
"(",
"\"use_loop_as_key\"",
",",
"False",
")",
":",
"data",
".",
"insert_data",
"(",
"save_key",
",",
"{",
"}",
",",
"values",
")",
"for",
"idx",
",",
"key",
"in",
"enumerate",
"(",
"values",
"[",
"\"steps_value\"",
"]",
")",
":",
"api_request",
"(",
"values",
"[",
"\"requests\"",
"]",
"[",
"idx",
"]",
",",
"data",
",",
"name",
",",
"f\"{save_key}|{key}\"",
",",
"ignore_testing",
")",
"else",
":",
"data",
".",
"insert_data",
"(",
"save_key",
",",
"[",
"None",
"]",
"*",
"len",
"(",
"values",
"[",
"\"requests\"",
"]",
")",
",",
"values",
")",
"for",
"idx",
",",
"value",
"in",
"enumerate",
"(",
"values",
"[",
"\"requests\"",
"]",
")",
":",
"api_request",
"(",
"value",
",",
"data",
",",
"name",
",",
"f\"{save_key}|{idx}\"",
",",
"ignore_testing",
")"
] | [
134,
0
] | [
156,
79
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
RawData.write_LatestCheerFile | ( self, cheerAmount = 0, userDisplayName = "" ) | return | Schreibt den letzten Cheerer mit der Anzahl der Bits in ein File | Schreibt den letzten Cheerer mit der Anzahl der Bits in ein File | def write_LatestCheerFile( self, cheerAmount = 0, userDisplayName = "" ):
''' Schreibt den letzten Cheerer mit der Anzahl der Bits in ein File '''
thisActionName = "write_LatestCheerFile"
# Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde
if self.DataFilesPath:
with codecs.open( self.LatestCheerFile, encoding="utf-8", mode="w") as file:
file.write( str( "{0}".format( userDisplayName ) ) + os.linesep + str( "({0} Bits)".format( TransformLocale_Decimals(int(cheerAmount)) ) ) )
file.close()
return | [
"def",
"write_LatestCheerFile",
"(",
"self",
",",
"cheerAmount",
"=",
"0",
",",
"userDisplayName",
"=",
"\"\"",
")",
":",
"thisActionName",
"=",
"\"write_LatestCheerFile\"",
"# Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde\r",
"if",
"self",
".",
"DataFilesPath",
":",
"with",
"codecs",
".",
"open",
"(",
"self",
".",
"LatestCheerFile",
",",
"encoding",
"=",
"\"utf-8\"",
",",
"mode",
"=",
"\"w\"",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"str",
"(",
"\"{0}\"",
".",
"format",
"(",
"userDisplayName",
")",
")",
"+",
"os",
".",
"linesep",
"+",
"str",
"(",
"\"({0} Bits)\"",
".",
"format",
"(",
"TransformLocale_Decimals",
"(",
"int",
"(",
"cheerAmount",
")",
")",
")",
")",
")",
"file",
".",
"close",
"(",
")",
"return"
] | [
189,
4
] | [
200,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.WriteMessage_NotEnoughPoints | (self, data) | return | Schreibt eine Benachrichtigung in den Chat, dass der Spieler nicht
über ausreichend Punkte verfügt.
| Schreibt eine Benachrichtigung in den Chat, dass der Spieler nicht
über ausreichend Punkte verfügt.
| def WriteMessage_NotEnoughPoints(self, data):
''' Schreibt eine Benachrichtigung in den Chat, dass der Spieler nicht
über ausreichend Punkte verfügt.
'''
thisActionName = "WriteMessage_NotEnoughPoints"
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_NotEnoughPoints
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
user=data.UserName,
target=self.GameTargetName,
points=TransformLocale_Decimals(
self.Parent.GetPoints(data.User)
),
pointsname=self.Parent.GetCurrencyName()
)
)
return | [
"def",
"WriteMessage_NotEnoughPoints",
"(",
"self",
",",
"data",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_NotEnoughPoints\"",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_NotEnoughPoints",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"user",
"=",
"data",
".",
"UserName",
",",
"target",
"=",
"self",
".",
"GameTargetName",
",",
"points",
"=",
"TransformLocale_Decimals",
"(",
"self",
".",
"Parent",
".",
"GetPoints",
"(",
"data",
".",
"User",
")",
")",
",",
"pointsname",
"=",
"self",
".",
"Parent",
".",
"GetCurrencyName",
"(",
")",
")",
")",
"return"
] | [
1202,
4
] | [
1225,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ToolboxAgenda.set_tags_for_selector | (selector) | Meta-Informationen für Selector-Shape einstellen | Meta-Informationen für Selector-Shape einstellen | def set_tags_for_selector(selector):
''' Meta-Informationen für Selector-Shape einstellen '''
selector.Tags.Add(TOOLBOX_AGENDA_SELECTOR, "1") | [
"def",
"set_tags_for_selector",
"(",
"selector",
")",
":",
"selector",
".",
"Tags",
".",
"Add",
"(",
"TOOLBOX_AGENDA_SELECTOR",
",",
"\"1\"",
")"
] | [
1242,
4
] | [
1244,
55
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispBaseWebApp.routeFile | ( self, filepath:str="", root="" ) | return output | Eine normale Datei laden.
Parameters
----------
filepath : str, optional
file und path einer datei aus root. The default is "".
root : str, optional
Basispfad für filepath
Returns
-------
output :
Inhalt der geladenen Datei
| Eine normale Datei laden. | def routeFile( self, filepath:str="", root="" ):
"""Eine normale Datei laden.
Parameters
----------
filepath : str, optional
file und path einer datei aus root. The default is "".
root : str, optional
Basispfad für filepath
Returns
-------
output :
Inhalt der geladenen Datei
"""
# sonst nur die Datei laden
filepath = osp.join( root, filepath ) # .format( **{"BASE_DIR": self._config.BASE_DIR} )
try:
output = send_file( filepath )
except:
output = "<h1>Datei {} wurde nicht gefunden</h1>".format( filepath )
self.status_code = 404
pass
return output | [
"def",
"routeFile",
"(",
"self",
",",
"filepath",
":",
"str",
"=",
"\"\"",
",",
"root",
"=",
"\"\"",
")",
":",
"# sonst nur die Datei laden",
"filepath",
"=",
"osp",
".",
"join",
"(",
"root",
",",
"filepath",
")",
"# .format( **{\"BASE_DIR\": self._config.BASE_DIR} )",
"try",
":",
"output",
"=",
"send_file",
"(",
"filepath",
")",
"except",
":",
"output",
"=",
"\"<h1>Datei {} wurde nicht gefunden</h1>\"",
".",
"format",
"(",
"filepath",
")",
"self",
".",
"status_code",
"=",
"404",
"pass",
"return",
"output"
] | [
650,
4
] | [
676,
21
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
GPIO._callback | (self, channel, callback) | Funktion, die fuer den event_callback genutzt wird
Sie ruft die gewuenschte Funktion nach 5-10 Sekunden auf
:param channel: Der channel mit Aenderung
:param callback: Die Callbackfunktion
| Funktion, die fuer den event_callback genutzt wird
Sie ruft die gewuenschte Funktion nach 5-10 Sekunden auf
:param channel: Der channel mit Aenderung
:param callback: Die Callbackfunktion
| def _callback(self, channel, callback):
"""Funktion, die fuer den event_callback genutzt wird
Sie ruft die gewuenschte Funktion nach 5-10 Sekunden auf
:param channel: Der channel mit Aenderung
:param callback: Die Callbackfunktion
"""
# warte eine zufaellige Zeit
sleep(randint(5, 10))
# falls das Programm noch laueft,
# der channel noch nicht gecleart wurde
# und der Channel noch nicht entfernt wurde
# (zum Beispiel durch einen Wechsel des Channels zu einem Ausgang)
# rufe die Callbackfunktion auf
if self.mode is not None and channel in self.channels and channel in self.events:
callback(channel) | [
"def",
"_callback",
"(",
"self",
",",
"channel",
",",
"callback",
")",
":",
"# warte eine zufaellige Zeit",
"sleep",
"(",
"randint",
"(",
"5",
",",
"10",
")",
")",
"# falls das Programm noch laueft,",
"# der channel noch nicht gecleart wurde",
"# und der Channel noch nicht entfernt wurde",
"# (zum Beispiel durch einen Wechsel des Channels zu einem Ausgang)",
"# rufe die Callbackfunktion auf",
"if",
"self",
".",
"mode",
"is",
"not",
"None",
"and",
"channel",
"in",
"self",
".",
"channels",
"and",
"channel",
"in",
"self",
".",
"events",
":",
"callback",
"(",
"channel",
")"
] | [
51,
4
] | [
65,
29
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispSAFRS._int_groupby | (cls, query, params:dict={} ) | return query, group_entities, ok | Internes durchführen einer group query
Bei einer Angabe von fields werden nur diese für den select bereich verwendet
Parameters
----------
query : obj
Das bisherige query Object
params : dict, optional
The default is::
{
"groups": {},
"fields": { "<tablename>": ["field1","FieldX"] },
"labels": { "<tablename>.<fieldname1>": [ "<label1>","<label2>" ], ... }
}
Returns
-------
query : query
Die um group ergänzte query
group_entities: list
Eine Liste mit den für die Gruppierung verwendeten Feldern::
{
"fields[<modul>]": "<feld1,feld2>",
"group": "<feld1,feld2>",
"filter": "eq(aktiv,true)"
}
ok: bool
gruppierung konnte erstellt werden
| Internes durchführen einer group query | def _int_groupby(cls, query, params:dict={} ):
"""Internes durchführen einer group query
Bei einer Angabe von fields werden nur diese für den select bereich verwendet
Parameters
----------
query : obj
Das bisherige query Object
params : dict, optional
The default is::
{
"groups": {},
"fields": { "<tablename>": ["field1","FieldX"] },
"labels": { "<tablename>.<fieldname1>": [ "<label1>","<label2>" ], ... }
}
Returns
-------
query : query
Die um group ergänzte query
group_entities: list
Eine Liste mit den für die Gruppierung verwendeten Feldern::
{
"fields[<modul>]": "<feld1,feld2>",
"group": "<feld1,feld2>",
"filter": "eq(aktiv,true)"
}
ok: bool
gruppierung konnte erstellt werden
"""
group_entities = []
field_entities = []
_params = {
"groups": {},
"fields": {},
"labels": {},
}
_params.update( params )
#
cls.appInfo("_int_groupby", _params )
try:
# ist groups angegeben worden dann verwenden
if len( _params["groups"].items() ) > 0:
for name, fields in _params["groups"].items():
for field in fields:
# das passende Model bestimmen
model = cls.access_cls( name )
# und daraus mit getattr die richtige column holen
column = getattr( model, field, None )
if column:
#
if "{}.{}".format(name, field) in _params["labels"]:
labels = _params["labels"]["{}.{}".format(name, field)]
if type(labels) is list:
for labelname in labels:
group_entities.append( column.label( labelname ) )
else:
group_entities.append( column.label( labels ) )
else:
group_entities.append( column )
# alle felder aus request verwenden
for name, fields in _params["fields"].items():
for field in fields:
# das passende Model bestimmen
model = cls.access_cls( name )
# und daraus mit getattr die richtige column
column = getattr( model, field, None )
if column:
if "{}.{}".format(name, field) in _params["labels"]:
labels = _params["labels"]["{}.{}".format(name, field)]
if type(labels) is list:
for labelname in labels:
field_entities.append( column.label( labelname ) )
else:
field_entities.append( column.label( labels ) )
else:
field_entities.append( column )
# ohne gruppenfelder die in fields angegebenen Verwenden
if len(group_entities) == 0:
group_entities = field_entities
# oder ohne fields die gruppenfelder verwenden
if len(field_entities) == 0:
field_entities = group_entities
# gruppen felder als Gruppierung verwenden
query = query.group_by( *group_entities )
# bisher alles ok
ok = True
except Exception as exc: # pragma: no cover
cls.appError( "Fehler bei _int_group", str( exc ) )
log.exception(exc)
ok = False
# die in fields angegebenen Felder als Anzeigefelder verwenden
query = query.with_entities(
*field_entities
)
return query, group_entities, ok | [
"def",
"_int_groupby",
"(",
"cls",
",",
"query",
",",
"params",
":",
"dict",
"=",
"{",
"}",
")",
":",
"group_entities",
"=",
"[",
"]",
"field_entities",
"=",
"[",
"]",
"_params",
"=",
"{",
"\"groups\"",
":",
"{",
"}",
",",
"\"fields\"",
":",
"{",
"}",
",",
"\"labels\"",
":",
"{",
"}",
",",
"}",
"_params",
".",
"update",
"(",
"params",
")",
"#",
"cls",
".",
"appInfo",
"(",
"\"_int_groupby\"",
",",
"_params",
")",
"try",
":",
"# ist groups angegeben worden dann verwenden",
"if",
"len",
"(",
"_params",
"[",
"\"groups\"",
"]",
".",
"items",
"(",
")",
")",
">",
"0",
":",
"for",
"name",
",",
"fields",
"in",
"_params",
"[",
"\"groups\"",
"]",
".",
"items",
"(",
")",
":",
"for",
"field",
"in",
"fields",
":",
"# das passende Model bestimmen",
"model",
"=",
"cls",
".",
"access_cls",
"(",
"name",
")",
"# und daraus mit getattr die richtige column holen",
"column",
"=",
"getattr",
"(",
"model",
",",
"field",
",",
"None",
")",
"if",
"column",
":",
"#",
"if",
"\"{}.{}\"",
".",
"format",
"(",
"name",
",",
"field",
")",
"in",
"_params",
"[",
"\"labels\"",
"]",
":",
"labels",
"=",
"_params",
"[",
"\"labels\"",
"]",
"[",
"\"{}.{}\"",
".",
"format",
"(",
"name",
",",
"field",
")",
"]",
"if",
"type",
"(",
"labels",
")",
"is",
"list",
":",
"for",
"labelname",
"in",
"labels",
":",
"group_entities",
".",
"append",
"(",
"column",
".",
"label",
"(",
"labelname",
")",
")",
"else",
":",
"group_entities",
".",
"append",
"(",
"column",
".",
"label",
"(",
"labels",
")",
")",
"else",
":",
"group_entities",
".",
"append",
"(",
"column",
")",
"# alle felder aus request verwenden",
"for",
"name",
",",
"fields",
"in",
"_params",
"[",
"\"fields\"",
"]",
".",
"items",
"(",
")",
":",
"for",
"field",
"in",
"fields",
":",
"# das passende Model bestimmen",
"model",
"=",
"cls",
".",
"access_cls",
"(",
"name",
")",
"# und daraus mit getattr die richtige column",
"column",
"=",
"getattr",
"(",
"model",
",",
"field",
",",
"None",
")",
"if",
"column",
":",
"if",
"\"{}.{}\"",
".",
"format",
"(",
"name",
",",
"field",
")",
"in",
"_params",
"[",
"\"labels\"",
"]",
":",
"labels",
"=",
"_params",
"[",
"\"labels\"",
"]",
"[",
"\"{}.{}\"",
".",
"format",
"(",
"name",
",",
"field",
")",
"]",
"if",
"type",
"(",
"labels",
")",
"is",
"list",
":",
"for",
"labelname",
"in",
"labels",
":",
"field_entities",
".",
"append",
"(",
"column",
".",
"label",
"(",
"labelname",
")",
")",
"else",
":",
"field_entities",
".",
"append",
"(",
"column",
".",
"label",
"(",
"labels",
")",
")",
"else",
":",
"field_entities",
".",
"append",
"(",
"column",
")",
"# ohne gruppenfelder die in fields angegebenen Verwenden",
"if",
"len",
"(",
"group_entities",
")",
"==",
"0",
":",
"group_entities",
"=",
"field_entities",
"# oder ohne fields die gruppenfelder verwenden",
"if",
"len",
"(",
"field_entities",
")",
"==",
"0",
":",
"field_entities",
"=",
"group_entities",
"# gruppen felder als Gruppierung verwenden",
"query",
"=",
"query",
".",
"group_by",
"(",
"*",
"group_entities",
")",
"# bisher alles ok",
"ok",
"=",
"True",
"except",
"Exception",
"as",
"exc",
":",
"# pragma: no cover",
"cls",
".",
"appError",
"(",
"\"Fehler bei _int_group\"",
",",
"str",
"(",
"exc",
")",
")",
"log",
".",
"exception",
"(",
"exc",
")",
"ok",
"=",
"False",
"# die in fields angegebenen Felder als Anzeigefelder verwenden",
"query",
"=",
"query",
".",
"with_entities",
"(",
"*",
"field_entities",
")",
"return",
"query",
",",
"group_entities",
",",
"ok"
] | [
1079,
4
] | [
1194,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ispSAFRS._int_init | ( cls ) | Initialisierung vor jedem Aufruf.
Setzt _resultUpdate vor jedem Aufruf::
{
"infos" : []
"errors" : []
}
Stellt _config und _configOverlay des Flask Servers bereit
Diese Funktion wird von ispSAFRS_decorator aufgerufen
Returns
-------
None.
| Initialisierung vor jedem Aufruf. | def _int_init( cls ):
"""Initialisierung vor jedem Aufruf.
Setzt _resultUpdate vor jedem Aufruf::
{
"infos" : []
"errors" : []
}
Stellt _config und _configOverlay des Flask Servers bereit
Diese Funktion wird von ispSAFRS_decorator aufgerufen
Returns
-------
None.
"""
cls._resultUpdate = {
"infos" : [],
"errors" : []
}
cls._config = current_app._config
cls._configOverlay = current_app._configOverlay | [
"def",
"_int_init",
"(",
"cls",
")",
":",
"cls",
".",
"_resultUpdate",
"=",
"{",
"\"infos\"",
":",
"[",
"]",
",",
"\"errors\"",
":",
"[",
"]",
"}",
"cls",
".",
"_config",
"=",
"current_app",
".",
"_config",
"cls",
".",
"_configOverlay",
"=",
"current_app",
".",
"_configOverlay"
] | [
703,
4
] | [
727,
55
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
register_audio_parts | (func) | return register_type_func(AUDIO_PARTS_TYPES, AudioError, func) | Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary AUDIO_PARTS_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
| Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary AUDIO_PARTS_TYPES hinzu. | def register_audio_parts(func):
"""Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary AUDIO_PARTS_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
"""
return register_type_func(AUDIO_PARTS_TYPES, AudioError, func) | [
"def",
"register_audio_parts",
"(",
"func",
")",
":",
"return",
"register_type_func",
"(",
"AUDIO_PARTS_TYPES",
",",
"AudioError",
",",
"func",
")"
] | [
24,
0
] | [
31,
66
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
qa_mlc.getLeafCenterPositions | ( self ) | return np.concatenate( ( fl1, hl, fl2) ) | Gibt die Positionen der Leaf Mitte aller Leafs
| Gibt die Positionen der Leaf Mitte aller Leafs | def getLeafCenterPositions( self ):
"""Gibt die Positionen der Leaf Mitte aller Leafs
"""
# Auswertepositionen (Mitte der Leafs)
fl1 = np.arange(-195, -100, 10 ) # Leafbreite 10mm
hl = np.arange(-97.5, 100, 5 ) # Leafbreite 5mm
fl2 = np.arange(105, 200, 10 ) # Leafbreite 10mm
return np.concatenate( ( fl1, hl, fl2) ) | [
"def",
"getLeafCenterPositions",
"(",
"self",
")",
":",
"# Auswertepositionen (Mitte der Leafs)",
"fl1",
"=",
"np",
".",
"arange",
"(",
"-",
"195",
",",
"-",
"100",
",",
"10",
")",
"# Leafbreite 10mm",
"hl",
"=",
"np",
".",
"arange",
"(",
"-",
"97.5",
",",
"100",
",",
"5",
")",
"# Leafbreite 5mm",
"fl2",
"=",
"np",
".",
"arange",
"(",
"105",
",",
"200",
",",
"10",
")",
"# Leafbreite 10mm",
"return",
"np",
".",
"concatenate",
"(",
"(",
"fl1",
",",
"hl",
",",
"fl2",
")",
")"
] | [
110,
4
] | [
118,
48
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
_bezierKreisN | (n) | return [item for sublist in kurven for item in sublist] | Einheitskreis in Bezierkurven, Quadranten n-fach geteilt | Einheitskreis in Bezierkurven, Quadranten n-fach geteilt | def _bezierKreisN(n):
'''Einheitskreis in Bezierkurven, Quadranten n-fach geteilt'''
kurven = map(lambda k : _bezierKurveNFachTeilen(k,n), _bezierKurvenEinheitskreis())
return [item for sublist in kurven for item in sublist] | [
"def",
"_bezierKreisN",
"(",
"n",
")",
":",
"kurven",
"=",
"map",
"(",
"lambda",
"k",
":",
"_bezierKurveNFachTeilen",
"(",
"k",
",",
"n",
")",
",",
"_bezierKurvenEinheitskreis",
"(",
")",
")",
"return",
"[",
"item",
"for",
"sublist",
"in",
"kurven",
"for",
"item",
"in",
"sublist",
"]"
] | [
59,
0
] | [
62,
59
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
checkVMAT.doMT_VMAT | ( self, fileData ) | return self.pdf.finish(), result | Variationen von DoseRate und Gantry Speed
Parameters
----------
fileData : pandas.DataFrame
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| Variationen von DoseRate und Gantry Speed
Parameters
----------
fileData : pandas.DataFrame
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| def doMT_VMAT( self, fileData ):
"""Variationen von DoseRate und Gantry Speed
Parameters
----------
fileData : pandas.DataFrame
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
result=[]
# wird für progress verwendet
filesMax=len( fileData )
self.fileCount = 0
# metadata ergänzen und lokal als md bereitstellen
md = self.metadata
md.update( {
"field_count": 2,
"_imgSize" : {"width" : 90, "height" : 85},
"_imgField": {"border": 20 },
"_chart": { "width" : 180, "height" : 60},
"manual": {
"filename": self.metadata.info["anleitung"],
"attrs": {"class":"layout-fill-width", "margin-bottom": "5mm"},
},
} )
def groupBySeries( df_group ):
"""Datumsweise Auswertung und PDF Ausgabe
"""
# das Datum vom ersten Datensatz verwenden
checkDate = df_group['AcquisitionDateTime'].iloc[0].strftime("%d.%m.%Y")
self.pdf.setContentName( checkDate )
#
# Anleitung
#
self.pdf.textFile( **md.manual )
# auf genau 2 Felder prüfen (open, DMLC)
errors = self.checkFields( md, None, df_group, md["field_count"])
if len(errors) > 0:
result.append( self.pdf_error_result(
md, date=checkDate, group_len=len( result ),
errors=errors
) )
return
'''
if not self.checkFields( md, fields=df_group, fieldLen=2 ):
result.append(
self.pdf_error_result(
md, group_len=len( result ),
date=checkDate,
msg="Die Feldanzahl ist nicht 2"
)
)
return
'''
# Analyse durchführen
drgs = qa_vmat( df_group, str( md["vmat_type"]), metadata=md )
if drgs.analysed == False:
logger.error( self.getMetaErrorString( md ) + ": Analyse nicht möglich" )
result.append(
self.pdf_error_result(
md, date=checkDate, group_len=len( result ),
msg="Analyse nicht möglich"
)
)
return
#
# Auswertung holen und Dataframe erstellen
#
df = pd.DataFrame( drgs.results["segments"] )
#
# Abweichung ausrechnen und Passed setzen
#
check = [
{ "field": 'r_dev', 'tolerance':'default' }
]
acceptance = self.check_acceptance( df, md, check )
#
# Ergebnis in result merken
#
result.append( self.createResult( df, md, check,
checkDate,
len( result ), # bisherige Ergebnisse in result
acceptance
) )
#
# das offene Feld
#
img_cls, fig, ax = drgs.open_image.plotImage( original=False
, field = md["_imgField"]
, metadata = md
, plotTitle = "{Kennung}"
, invert=False, plotCax=False, plotField=True, getPlot=False )
# segmente einzeichnen
drgs.draw_segments( ax, df )
# Bild anzeigen
self.pdf.image( img_cls.getPlot(), md["_imgSize"] )
#
# das vmat Feld
#
img_cls, fig, ax = drgs.dmlc_image.plotImage( original=False
, field = md["_imgField"]
, metadata = md
, plotTitle="{Kennung}"
, invert=False, plotCax=False, plotField=True, getPlot=False )
# segmente einzeichnen
drgs.draw_segments(ax, df)
# Bild anzeigen
self.pdf.image(img_cls.getPlot(), md["_imgSize"] )
#
# das chart
#
self.pdf.image( drgs.plotChart( md["_chart"] ), md["_chart"], attrs={"margin-top": "5mm"} )
#
# Tabelle anzeigen
#
self.pdf.pandas( df,
attrs={"class":"layout-fill-width", "margin-top": "5mm"},
fields=md["table_fields"]
)
#
# Auswertungs text anzeigen
#
drgs.results["f_warning"] = md.current.tolerance.default.warning.get("f","")
drgs.results["f_error"] = md.current.tolerance.default.error.get("f","")
# <h3>{header} VMAT results:</h3>
text = """<br>
Source-to-Image Distance: <b style="position:absolute;left:45mm;">{SID:2.0f} mm</b>
Absolute mean deviation: <b style="position:absolute;left:45mm;">{deviation:2.2f} %</b>
Maximum deviation: <b style="position:absolute;left:45mm;">{maximum_deviation:2.2f} %</b><br>
Warnung bei: <b style="position:absolute;left:45mm;">{f_warning}</b><br>
Fehler bei: <b style="position:absolute;left:45mm;">{f_error}</b>
""".format( **drgs.results ).replace("{value}", "M<sub>dev</sub>")
self.pdf.text( text )
# Gesamt check - das schlechteste aus der tabelle
self.pdf.resultIcon( acceptance )
# progress pro file stimmt nicht immer genau (baseimage)
# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf
self.fileCount += 2
if hasattr( logger, "progress"):
logger.progress( md["testId"], 40 + ( 40 / filesMax * self.fileCount ) )
#
# Gruppiert nach SeriesNumber abarbeiten
#
fileData.groupby( [ 'day', 'SeriesNumber' ] ).apply( groupBySeries )
# abschließen pdfdaten und result zurückgeben
return self.pdf.finish(), result | [
"def",
"doMT_VMAT",
"(",
"self",
",",
"fileData",
")",
":",
"result",
"=",
"[",
"]",
"# wird für progress verwendet",
"filesMax",
"=",
"len",
"(",
"fileData",
")",
"self",
".",
"fileCount",
"=",
"0",
"# metadata ergänzen und lokal als md bereitstellen",
"md",
"=",
"self",
".",
"metadata",
"md",
".",
"update",
"(",
"{",
"\"field_count\"",
":",
"2",
",",
"\"_imgSize\"",
":",
"{",
"\"width\"",
":",
"90",
",",
"\"height\"",
":",
"85",
"}",
",",
"\"_imgField\"",
":",
"{",
"\"border\"",
":",
"20",
"}",
",",
"\"_chart\"",
":",
"{",
"\"width\"",
":",
"180",
",",
"\"height\"",
":",
"60",
"}",
",",
"\"manual\"",
":",
"{",
"\"filename\"",
":",
"self",
".",
"metadata",
".",
"info",
"[",
"\"anleitung\"",
"]",
",",
"\"attrs\"",
":",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
",",
"\"margin-bottom\"",
":",
"\"5mm\"",
"}",
",",
"}",
",",
"}",
")",
"def",
"groupBySeries",
"(",
"df_group",
")",
":",
"\"\"\"Datumsweise Auswertung und PDF Ausgabe \n \n \"\"\"",
"# das Datum vom ersten Datensatz verwenden",
"checkDate",
"=",
"df_group",
"[",
"'AcquisitionDateTime'",
"]",
".",
"iloc",
"[",
"0",
"]",
".",
"strftime",
"(",
"\"%d.%m.%Y\"",
")",
"self",
".",
"pdf",
".",
"setContentName",
"(",
"checkDate",
")",
"# ",
"# Anleitung",
"#",
"self",
".",
"pdf",
".",
"textFile",
"(",
"*",
"*",
"md",
".",
"manual",
")",
"# auf genau 2 Felder prüfen (open, DMLC)",
"errors",
"=",
"self",
".",
"checkFields",
"(",
"md",
",",
"None",
",",
"df_group",
",",
"md",
"[",
"\"field_count\"",
"]",
")",
"if",
"len",
"(",
"errors",
")",
">",
"0",
":",
"result",
".",
"append",
"(",
"self",
".",
"pdf_error_result",
"(",
"md",
",",
"date",
"=",
"checkDate",
",",
"group_len",
"=",
"len",
"(",
"result",
")",
",",
"errors",
"=",
"errors",
")",
")",
"return",
"'''\n if not self.checkFields( md, fields=df_group, fieldLen=2 ):\n result.append( \n self.pdf_error_result( \n md, group_len=len( result ),\n date=checkDate,\n msg=\"Die Feldanzahl ist nicht 2\"\n )\n )\n return\n '''",
"# Analyse durchführen",
"drgs",
"=",
"qa_vmat",
"(",
"df_group",
",",
"str",
"(",
"md",
"[",
"\"vmat_type\"",
"]",
")",
",",
"metadata",
"=",
"md",
")",
"if",
"drgs",
".",
"analysed",
"==",
"False",
":",
"logger",
".",
"error",
"(",
"self",
".",
"getMetaErrorString",
"(",
"md",
")",
"+",
"\": Analyse nicht möglich\" ",
"",
"result",
".",
"append",
"(",
"self",
".",
"pdf_error_result",
"(",
"md",
",",
"date",
"=",
"checkDate",
",",
"group_len",
"=",
"len",
"(",
"result",
")",
",",
"msg",
"=",
"\"Analyse nicht möglich\"",
")",
")",
"return",
"#",
"# Auswertung holen und Dataframe erstellen",
"# ",
"df",
"=",
"pd",
".",
"DataFrame",
"(",
"drgs",
".",
"results",
"[",
"\"segments\"",
"]",
")",
"#",
"# Abweichung ausrechnen und Passed setzen",
"#",
"check",
"=",
"[",
"{",
"\"field\"",
":",
"'r_dev'",
",",
"'tolerance'",
":",
"'default'",
"}",
"]",
"acceptance",
"=",
"self",
".",
"check_acceptance",
"(",
"df",
",",
"md",
",",
"check",
")",
"#",
"# Ergebnis in result merken",
"#",
"result",
".",
"append",
"(",
"self",
".",
"createResult",
"(",
"df",
",",
"md",
",",
"check",
",",
"checkDate",
",",
"len",
"(",
"result",
")",
",",
"# bisherige Ergebnisse in result",
"acceptance",
")",
")",
"#",
"# das offene Feld",
"#",
"img_cls",
",",
"fig",
",",
"ax",
"=",
"drgs",
".",
"open_image",
".",
"plotImage",
"(",
"original",
"=",
"False",
",",
"field",
"=",
"md",
"[",
"\"_imgField\"",
"]",
",",
"metadata",
"=",
"md",
",",
"plotTitle",
"=",
"\"{Kennung}\"",
",",
"invert",
"=",
"False",
",",
"plotCax",
"=",
"False",
",",
"plotField",
"=",
"True",
",",
"getPlot",
"=",
"False",
")",
"# segmente einzeichnen",
"drgs",
".",
"draw_segments",
"(",
"ax",
",",
"df",
")",
"# Bild anzeigen",
"self",
".",
"pdf",
".",
"image",
"(",
"img_cls",
".",
"getPlot",
"(",
")",
",",
"md",
"[",
"\"_imgSize\"",
"]",
")",
"#",
"# das vmat Feld",
"#",
"img_cls",
",",
"fig",
",",
"ax",
"=",
"drgs",
".",
"dmlc_image",
".",
"plotImage",
"(",
"original",
"=",
"False",
",",
"field",
"=",
"md",
"[",
"\"_imgField\"",
"]",
",",
"metadata",
"=",
"md",
",",
"plotTitle",
"=",
"\"{Kennung}\"",
",",
"invert",
"=",
"False",
",",
"plotCax",
"=",
"False",
",",
"plotField",
"=",
"True",
",",
"getPlot",
"=",
"False",
")",
"# segmente einzeichnen",
"drgs",
".",
"draw_segments",
"(",
"ax",
",",
"df",
")",
"# Bild anzeigen",
"self",
".",
"pdf",
".",
"image",
"(",
"img_cls",
".",
"getPlot",
"(",
")",
",",
"md",
"[",
"\"_imgSize\"",
"]",
")",
"#",
"# das chart",
"#",
"self",
".",
"pdf",
".",
"image",
"(",
"drgs",
".",
"plotChart",
"(",
"md",
"[",
"\"_chart\"",
"]",
")",
",",
"md",
"[",
"\"_chart\"",
"]",
",",
"attrs",
"=",
"{",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
")",
"#",
"# Tabelle anzeigen",
"#",
"self",
".",
"pdf",
".",
"pandas",
"(",
"df",
",",
"attrs",
"=",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
",",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
",",
"fields",
"=",
"md",
"[",
"\"table_fields\"",
"]",
")",
"#",
"# Auswertungs text anzeigen",
"#",
"drgs",
".",
"results",
"[",
"\"f_warning\"",
"]",
"=",
"md",
".",
"current",
".",
"tolerance",
".",
"default",
".",
"warning",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
"drgs",
".",
"results",
"[",
"\"f_error\"",
"]",
"=",
"md",
".",
"current",
".",
"tolerance",
".",
"default",
".",
"error",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
"# <h3>{header} VMAT results:</h3>",
"text",
"=",
"\"\"\"<br>\n Source-to-Image Distance: <b style=\"position:absolute;left:45mm;\">{SID:2.0f} mm</b>\n Absolute mean deviation: <b style=\"position:absolute;left:45mm;\">{deviation:2.2f} %</b>\n Maximum deviation: <b style=\"position:absolute;left:45mm;\">{maximum_deviation:2.2f} %</b><br>\n Warnung bei: <b style=\"position:absolute;left:45mm;\">{f_warning}</b><br>\n Fehler bei: <b style=\"position:absolute;left:45mm;\">{f_error}</b>\n \"\"\"",
".",
"format",
"(",
"*",
"*",
"drgs",
".",
"results",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"M<sub>dev</sub>\"",
")",
"self",
".",
"pdf",
".",
"text",
"(",
"text",
")",
"# Gesamt check - das schlechteste aus der tabelle",
"self",
".",
"pdf",
".",
"resultIcon",
"(",
"acceptance",
")",
"# progress pro file stimmt nicht immer genau (baseimage)",
"# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf",
"self",
".",
"fileCount",
"+=",
"2",
"if",
"hasattr",
"(",
"logger",
",",
"\"progress\"",
")",
":",
"logger",
".",
"progress",
"(",
"md",
"[",
"\"testId\"",
"]",
",",
"40",
"+",
"(",
"40",
"/",
"filesMax",
"*",
"self",
".",
"fileCount",
")",
")",
"#",
"# Gruppiert nach SeriesNumber abarbeiten",
"# ",
"fileData",
".",
"groupby",
"(",
"[",
"'day'",
",",
"'SeriesNumber'",
"]",
")",
".",
"apply",
"(",
"groupBySeries",
")",
"# abschließen pdfdaten und result zurückgeben",
"return",
"self",
".",
"pdf",
".",
"finish",
"(",
")",
",",
"result"
] | [
253,
4
] | [
431,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
TicTacToe.setze | (self, pos, symbol) | return Zustand.OK | Setze ein Symbol (Belegung.KREUZ, Belegung.Kreis), wenn es die
Spielregeln erlauben. Speichere die Belegung des Spielfeldes,
rufe die entsprechende Ausgabe-Funktion auf und gib Zustand.OK oder
Zustand.NICHT_ERLAUBT zurück.
| Setze ein Symbol (Belegung.KREUZ, Belegung.Kreis), wenn es die
Spielregeln erlauben. Speichere die Belegung des Spielfeldes,
rufe die entsprechende Ausgabe-Funktion auf und gib Zustand.OK oder
Zustand.NICHT_ERLAUBT zurück.
| def setze(self, pos, symbol):
""" Setze ein Symbol (Belegung.KREUZ, Belegung.Kreis), wenn es die
Spielregeln erlauben. Speichere die Belegung des Spielfeldes,
rufe die entsprechende Ausgabe-Funktion auf und gib Zustand.OK oder
Zustand.NICHT_ERLAUBT zurück.
"""
return Zustand.OK | [
"def",
"setze",
"(",
"self",
",",
"pos",
",",
"symbol",
")",
":",
"return",
"Zustand",
".",
"OK"
] | [
37,
4
] | [
43,
25
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
open_memory_resource | (job_name: str, name: str, time_delta, mode: str = "rt") | return open_resource(os.path.join(MEMORY_LOCATION, job_name, name, res_name), mode) | Öffnet die übergebene Memory-Ressource.
:param time_delta: Tage, die abgezogen werden sollen vom heutigen Tag. Zum Öffnen der richtigen Ressource.
:param job_name: Name des Jobs von der die Funktion aufgerufen wurde.
:param name: Name der Datei (ohne Datum).
:param mode: Mode zum Öffnen der Datei siehe :func:`open`.
| Öffnet die übergebene Memory-Ressource. | def open_memory_resource(job_name: str, name: str, time_delta, mode: str = "rt"):
"""Öffnet die übergebene Memory-Ressource.
:param time_delta: Tage, die abgezogen werden sollen vom heutigen Tag. Zum Öffnen der richtigen Ressource.
:param job_name: Name des Jobs von der die Funktion aufgerufen wurde.
:param name: Name der Datei (ohne Datum).
:param mode: Mode zum Öffnen der Datei siehe :func:`open`.
"""
res_name = (datetime.now() - timedelta(time_delta)).strftime('%Y-%m-%d') + ".json"
return open_resource(os.path.join(MEMORY_LOCATION, job_name, name, res_name), mode) | [
"def",
"open_memory_resource",
"(",
"job_name",
":",
"str",
",",
"name",
":",
"str",
",",
"time_delta",
",",
"mode",
":",
"str",
"=",
"\"rt\"",
")",
":",
"res_name",
"=",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"timedelta",
"(",
"time_delta",
")",
")",
".",
"strftime",
"(",
"'%Y-%m-%d'",
")",
"+",
"\".json\"",
"return",
"open_resource",
"(",
"os",
".",
"path",
".",
"join",
"(",
"MEMORY_LOCATION",
",",
"job_name",
",",
"name",
",",
"res_name",
")",
",",
"mode",
")"
] | [
279,
0
] | [
289,
87
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
request | (values: dict, data: StepData, name: str, save_key, ignore_testing=False) | Fragt einmal die gewünschten Daten einer API ab.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param name: Testdatei, die geladen werden soll.
:param save_key: Key, unter dem die Daten gespeichert werden.
:param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist.
| Fragt einmal die gewünschten Daten einer API ab. | def request(values: dict, data: StepData, name: str, save_key, ignore_testing=False):
"""Fragt einmal die gewünschten Daten einer API ab.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param name: Testdatei, die geladen werden soll.
:param save_key: Key, unter dem die Daten gespeichert werden.
:param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist.
"""
if data.get_config("testing", False) and not ignore_testing:
return _load_test_data(values, data, name, save_key)
fetch(values, data, save_key) | [
"def",
"request",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
",",
"name",
":",
"str",
",",
"save_key",
",",
"ignore_testing",
"=",
"False",
")",
":",
"if",
"data",
".",
"get_config",
"(",
"\"testing\"",
",",
"False",
")",
"and",
"not",
"ignore_testing",
":",
"return",
"_load_test_data",
"(",
"values",
",",
"data",
",",
"name",
",",
"save_key",
")",
"fetch",
"(",
"values",
",",
"data",
",",
"save_key",
")"
] | [
44,
0
] | [
56,
33
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
GUI.setzen0_digitale_eingaenge | (self) | Digitale Eingänge auf 0 setzen | Digitale Eingänge auf 0 setzen | def setzen0_digitale_eingaenge(self):
""" Digitale Eingänge auf 0 setzen """
setzer = Setzer()
setzer.setzen0_digitale_eingaenge()
self.aktualisieren_eingangswerte() | [
"def",
"setzen0_digitale_eingaenge",
"(",
"self",
")",
":",
"setzer",
"=",
"Setzer",
"(",
")",
"setzer",
".",
"setzen0_digitale_eingaenge",
"(",
")",
"self",
".",
"aktualisieren_eingangswerte",
"(",
")"
] | [
206,
4
] | [
210,
42
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
qa_wl._axisImage | (self, ax=None, axisId="", title="" ) | Ein Achsen Bild mit markern ausgeben
| Ein Achsen Bild mit markern ausgeben
| def _axisImage(self, ax=None, axisId="", title="" ):
"""Ein Achsen Bild mit markern ausgeben
"""
import matplotlib.patches as patches
cmap=plt.cm.gray
ax.imshow( self.mergeArray[axisId], cmap=cmap )
# ein Kreis für das erwartete Zentrum der Kugel
circ = patches.Circle((self.virtualCenterDots.x, self.virtualCenterDots.y,), 6, alpha=1, ec='yellow', fill=False)
ax.add_patch(circ)
ax.set_title( title )
ax.axis('off')
#print(self.roi )
# Achsenbeschriftung in mm
# x-Achse
#xlim = ax.get_xlim()
#width = xlim[0] + xlim[1]
#print(width)
#x = np.arange(0, len( transmission["profile"] ), width / 4 )
#ax.get_xaxis().set_ticklabels([ -20, -10, 0, 10, 20])
#ax.get_xaxis().set_ticks( x )
#ax.set_xlim( [ self.roi["X1"], self.roi["X2"] ] )
#ax.set_ylim( [ self.roi["Y1"], self.roi["Y2"] ] )
#ax.plot( len(self.mergeArray[axisId])/2, len(self.mergeArray[axisId])/2, 'y+', ms=100, markeredgewidth=1 )
ax.plot( self.virtualCenterDots.x, self.virtualCenterDots.y, 'y+', ms=200, markeredgewidth=1 ) | [
"def",
"_axisImage",
"(",
"self",
",",
"ax",
"=",
"None",
",",
"axisId",
"=",
"\"\"",
",",
"title",
"=",
"\"\"",
")",
":",
"import",
"matplotlib",
".",
"patches",
"as",
"patches",
"cmap",
"=",
"plt",
".",
"cm",
".",
"gray",
"ax",
".",
"imshow",
"(",
"self",
".",
"mergeArray",
"[",
"axisId",
"]",
",",
"cmap",
"=",
"cmap",
")",
"# ein Kreis für das erwartete Zentrum der Kugel",
"circ",
"=",
"patches",
".",
"Circle",
"(",
"(",
"self",
".",
"virtualCenterDots",
".",
"x",
",",
"self",
".",
"virtualCenterDots",
".",
"y",
",",
")",
",",
"6",
",",
"alpha",
"=",
"1",
",",
"ec",
"=",
"'yellow'",
",",
"fill",
"=",
"False",
")",
"ax",
".",
"add_patch",
"(",
"circ",
")",
"ax",
".",
"set_title",
"(",
"title",
")",
"ax",
".",
"axis",
"(",
"'off'",
")",
"#print(self.roi )",
"# Achsenbeschriftung in mm",
"# x-Achse",
"#xlim = ax.get_xlim()",
"#width = xlim[0] + xlim[1] ",
"#print(width)",
"#x = np.arange(0, len( transmission[\"profile\"] ), width / 4 )",
"#ax.get_xaxis().set_ticklabels([ -20, -10, 0, 10, 20])",
"#ax.get_xaxis().set_ticks( x )",
"#ax.set_xlim( [ self.roi[\"X1\"], self.roi[\"X2\"] ] )",
"#ax.set_ylim( [ self.roi[\"Y1\"], self.roi[\"Y2\"] ] )",
"#ax.plot( len(self.mergeArray[axisId])/2, len(self.mergeArray[axisId])/2, 'y+', ms=100, markeredgewidth=1 ) ",
"ax",
".",
"plot",
"(",
"self",
".",
"virtualCenterDots",
".",
"x",
",",
"self",
".",
"virtualCenterDots",
".",
"y",
",",
"'y+'",
",",
"ms",
"=",
"200",
",",
"markeredgewidth",
"=",
"1",
")"
] | [
611,
4
] | [
642,
102
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
register_api | (func) | return register_type_func(API_TYPES, APIError, func) | Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary API_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
| Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary API_TYPES hinzu. | def register_api(func):
"""Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary API_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
"""
return register_type_func(API_TYPES, APIError, func) | [
"def",
"register_api",
"(",
"func",
")",
":",
"return",
"register_type_func",
"(",
"API_TYPES",
",",
"APIError",
",",
"func",
")"
] | [
33,
0
] | [
40,
56
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
open_infoprovider_resource | (path: str, mode: str = "rt") | return open(res_path, mode, encoding='utf-8') | Gibt einen geöffneten Infoprovider zurück.
Sollte die Datei oder ein zu dieser Datei führender Ordner fehlen, so werden diese erstellt.
:param path: Pfad der zu öffnenden Datei.
:param mode: Der Modus, mit welcher die Datei geöffnet wird. Für eine nähere Information siehe :func:`open`
:return: Die geöffnete Datei.
:raises: OSError
| Gibt einen geöffneten Infoprovider zurück. | def open_infoprovider_resource(path: str, mode: str = "rt"):
"""Gibt einen geöffneten Infoprovider zurück.
Sollte die Datei oder ein zu dieser Datei führender Ordner fehlen, so werden diese erstellt.
:param path: Pfad der zu öffnenden Datei.
:param mode: Der Modus, mit welcher die Datei geöffnet wird. Für eine nähere Information siehe :func:`open`
:return: Die geöffnete Datei.
:raises: OSError
"""
res_path = get_infoprovider_path(path)
os.makedirs(os.path.dirname(res_path), exist_ok=True)
return open(res_path, mode, encoding='utf-8') | [
"def",
"open_infoprovider_resource",
"(",
"path",
":",
"str",
",",
"mode",
":",
"str",
"=",
"\"rt\"",
")",
":",
"res_path",
"=",
"get_infoprovider_path",
"(",
"path",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"res_path",
")",
",",
"exist_ok",
"=",
"True",
")",
"return",
"open",
"(",
"res_path",
",",
"mode",
",",
"encoding",
"=",
"'utf-8'",
")"
] | [
249,
0
] | [
263,
49
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Playlists.initialize | (self) | Versuche das mpd Playlisten Verzeichnis aus mpd.conf herauszulesen | Versuche das mpd Playlisten Verzeichnis aus mpd.conf herauszulesen | def initialize(self):
""" Versuche das mpd Playlisten Verzeichnis aus mpd.conf herauszulesen """
try:
with open(MPD_CONFIG) as m_cfg:
for line in m_cfg:
if line.strip():
if line.strip().split()[0].lower() == 'playlist_directory':
self.pl_dir = line.strip().split()[1].strip("\'\"")
if line.strip().split()[0].lower() == 'music_directory':
self.music_dir = line.strip().split()[1].strip("\'\"")
except FileNotFoundError:
print("mpd config file " + MPD_CONFIG + " not found, using default directories")
print(self.pl_dir + " and " + self.music_dir)
# scriptuserid = os.getuid()
# if not scriptuserid: # Script läuft nicht als 'root'
# # Scriptuser sollte im Normalfall der User 'pi' sein
# scriptuser = pwd.getpwuid(scriptuserid).pw_name
# if not scriptuser in grp.getgrnam('audio').gr_mem:
# # Der Scriptuser sollte Mitglied der Gruppe 'audio' sein
# subprocess.call("sudo usermod -a -G audio " + scriptuser, shell=True)
#
# if os.path.isdir(self.pl_dir):
# # Mache das mpd Playlisten Verzeichnis für
# # die Gruppe 'audio' beschreibbar
# if grp.getgrgid(os.stat(self.pl_dir).st_gid).gr_name != 'audio':
# subprocess.call("sudo chgrp audio "+self.pl_dir, shell=True)
# if os.stat(self.pl_dir).st_mode != 17917: # oktal: 42755 = drwxrwsr-x
# subprocess.call("sudo chmod 2755 "+self.pl_dir, shell=True)
#
# if os.path.isdir(self.music_dir):
# # Mache das mpd music Verzeichnis für
# # die Gruppe 'audio' beschreibbar
# if grp.getgrgid(os.stat(self.music_dir).st_gid).gr_name != 'audio':
# subprocess.call("sudo chgrp audio "+self.music_dir, shell=True)
# if os.stat(self.music_dir).st_mode != 17917: # oktal: 42755 = drwxrwsr-x
# subprocess.call("sudo chmod 2775 "+self.music_dir, shell=True)
if MPC.listplaylists() == []:
self.copy_playlists()
# Ist vielleicht eine Playlist im mpd 'state'-file
if int(MPC.status()['playlistlength']) > 0:
if MPC.status()['state'] != 'play':
MPC.play()
else:
try:
MPC.load(DEFAULTPLAYLIST)
MPC.play()
except CommandError as err:
if 'No such playlist' in str(err):
print("Playlist '" + DEFAULTPLAYLIST + "' not found")
print("Starting with empty list.")
self.in_playlist = False
if self.read_pl():
self.index = int(MPC.currentsong().get('pos', '0')) | [
"def",
"initialize",
"(",
"self",
")",
":",
"try",
":",
"with",
"open",
"(",
"MPD_CONFIG",
")",
"as",
"m_cfg",
":",
"for",
"line",
"in",
"m_cfg",
":",
"if",
"line",
".",
"strip",
"(",
")",
":",
"if",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
"==",
"'playlist_directory'",
":",
"self",
".",
"pl_dir",
"=",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"1",
"]",
".",
"strip",
"(",
"\"\\'\\\"\"",
")",
"if",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
"==",
"'music_directory'",
":",
"self",
".",
"music_dir",
"=",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"1",
"]",
".",
"strip",
"(",
"\"\\'\\\"\"",
")",
"except",
"FileNotFoundError",
":",
"print",
"(",
"\"mpd config file \"",
"+",
"MPD_CONFIG",
"+",
"\" not found, using default directories\"",
")",
"print",
"(",
"self",
".",
"pl_dir",
"+",
"\" and \"",
"+",
"self",
".",
"music_dir",
")",
"# scriptuserid = os.getuid()",
"# if not scriptuserid: # Script läuft nicht als 'root'",
"# # Scriptuser sollte im Normalfall der User 'pi' sein",
"# scriptuser = pwd.getpwuid(scriptuserid).pw_name",
"# if not scriptuser in grp.getgrnam('audio').gr_mem:",
"# # Der Scriptuser sollte Mitglied der Gruppe 'audio' sein",
"# subprocess.call(\"sudo usermod -a -G audio \" + scriptuser, shell=True)",
"#",
"# if os.path.isdir(self.pl_dir):",
"# # Mache das mpd Playlisten Verzeichnis für",
"# # die Gruppe 'audio' beschreibbar",
"# if grp.getgrgid(os.stat(self.pl_dir).st_gid).gr_name != 'audio':",
"# subprocess.call(\"sudo chgrp audio \"+self.pl_dir, shell=True)",
"# if os.stat(self.pl_dir).st_mode != 17917: # oktal: 42755 = drwxrwsr-x",
"# subprocess.call(\"sudo chmod 2755 \"+self.pl_dir, shell=True)",
"#",
"# if os.path.isdir(self.music_dir):",
"# # Mache das mpd music Verzeichnis für",
"# # die Gruppe 'audio' beschreibbar",
"# if grp.getgrgid(os.stat(self.music_dir).st_gid).gr_name != 'audio':",
"# subprocess.call(\"sudo chgrp audio \"+self.music_dir, shell=True)",
"# if os.stat(self.music_dir).st_mode != 17917: # oktal: 42755 = drwxrwsr-x",
"# subprocess.call(\"sudo chmod 2775 \"+self.music_dir, shell=True)",
"if",
"MPC",
".",
"listplaylists",
"(",
")",
"==",
"[",
"]",
":",
"self",
".",
"copy_playlists",
"(",
")",
"# Ist vielleicht eine Playlist im mpd 'state'-file",
"if",
"int",
"(",
"MPC",
".",
"status",
"(",
")",
"[",
"'playlistlength'",
"]",
")",
">",
"0",
":",
"if",
"MPC",
".",
"status",
"(",
")",
"[",
"'state'",
"]",
"!=",
"'play'",
":",
"MPC",
".",
"play",
"(",
")",
"else",
":",
"try",
":",
"MPC",
".",
"load",
"(",
"DEFAULTPLAYLIST",
")",
"MPC",
".",
"play",
"(",
")",
"except",
"CommandError",
"as",
"err",
":",
"if",
"'No such playlist'",
"in",
"str",
"(",
"err",
")",
":",
"print",
"(",
"\"Playlist '\"",
"+",
"DEFAULTPLAYLIST",
"+",
"\"' not found\"",
")",
"print",
"(",
"\"Starting with empty list.\"",
")",
"self",
".",
"in_playlist",
"=",
"False",
"if",
"self",
".",
"read_pl",
"(",
")",
":",
"self",
".",
"index",
"=",
"int",
"(",
"MPC",
".",
"currentsong",
"(",
")",
".",
"get",
"(",
"'pos'",
",",
"'0'",
")",
")"
] | [
668,
4
] | [
723,
63
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
register_thumbnail | (func) | return register_type_func(THUMBNAIL_TYPES, ThumbnailError, func) | Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary THUMBNAIL_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
| Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary THUMBNAIL_TYPES hinzu. | def register_thumbnail(func):
"""Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary THUMBNAIL_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
"""
return register_type_func(THUMBNAIL_TYPES, ThumbnailError, func) | [
"def",
"register_thumbnail",
"(",
"func",
")",
":",
"return",
"register_type_func",
"(",
"THUMBNAIL_TYPES",
",",
"ThumbnailError",
",",
"func",
")"
] | [
20,
0
] | [
27,
68
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
bot_command | (nachricht, bot, users, telegram_id) | Hier werden alle Verfügbaren Telegramkommdos angelegt | Hier werden alle Verfügbaren Telegramkommdos angelegt | def bot_command(nachricht, bot, users, telegram_id):
"""Hier werden alle Verfügbaren Telegramkommdos angelegt"""
kommando = nachricht["message"]["text"]
if kommando == "/starte_pc":
m_starte_pc(nachricht, bot, users, telegram_id)
elif kommando == "/abbrechen":
m_abbrechen(nachricht, bot, users, telegram_id)
elif users[telegram_id].menue is not None:
users[telegram_id].menue(nachricht, bot, users, telegram_id)
else:
bot.send_message(telegram_id, "Botkommando unbekannt") | [
"def",
"bot_command",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")",
":",
"kommando",
"=",
"nachricht",
"[",
"\"message\"",
"]",
"[",
"\"text\"",
"]",
"if",
"kommando",
"==",
"\"/starte_pc\"",
":",
"m_starte_pc",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")",
"elif",
"kommando",
"==",
"\"/abbrechen\"",
":",
"m_abbrechen",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")",
"elif",
"users",
"[",
"telegram_id",
"]",
".",
"menue",
"is",
"not",
"None",
":",
"users",
"[",
"telegram_id",
"]",
".",
"menue",
"(",
"nachricht",
",",
"bot",
",",
"users",
",",
"telegram_id",
")",
"else",
":",
"bot",
".",
"send_message",
"(",
"telegram_id",
",",
"\"Botkommando unbekannt\"",
")"
] | [
141,
0
] | [
151,
62
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
StepData.get_api_key | (api_key_name) | return api_key | Funktion um einen API-Key aus der Konfigurationsdatei zu laden.
Verwendet hierzu das Modul :py:mod:`config_manager`.
:param api_key_name: Name des Konfigurationseintrags für den API-Key.
:return: API-Key
:raises: FileNotFoundError, KeyError
| Funktion um einen API-Key aus der Konfigurationsdatei zu laden. | def get_api_key(api_key_name):
""" Funktion um einen API-Key aus der Konfigurationsdatei zu laden.
Verwendet hierzu das Modul :py:mod:`config_manager`.
:param api_key_name: Name des Konfigurationseintrags für den API-Key.
:return: API-Key
:raises: FileNotFoundError, KeyError
"""
api_key = config_manager.get_private()["api_keys"].get(api_key_name, None)
if api_key is None:
raise APIKeyError(api_key_name)
return api_key | [
"def",
"get_api_key",
"(",
"api_key_name",
")",
":",
"api_key",
"=",
"config_manager",
".",
"get_private",
"(",
")",
"[",
"\"api_keys\"",
"]",
".",
"get",
"(",
"api_key_name",
",",
"None",
")",
"if",
"api_key",
"is",
"None",
":",
"raise",
"APIKeyError",
"(",
"api_key_name",
")",
"return",
"api_key"
] | [
33,
4
] | [
48,
22
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
owner.rmrank | (self, ctx, member: discord.Member=None, *rankName: str) | Gibt einem Teamler ein RankDown
Beispiel:
-----------
:rmrole @Der-Eddy#6508 Member
| Gibt einem Teamler ein RankDown
Beispiel:
-----------
:rmrole | async def rmrank(self, ctx, member: discord.Member=None, *rankName: str):
'''Gibt einem Teamler ein RankDown
Beispiel:
-----------
:rmrole @Der-Eddy#6508 Member
'''
rank = discord.utils.get(ctx.guild.roles, name=' '.join(rankName))
await ctx.message.delete()
if member is not None:
await member.remove_roles(rank)
await ctx.send(f':white_check_mark: Teamler **{member.name}** wurde auf **{rank.name}** downranked')
else:
await ctx.send(':no_entry: Du musst einen Benutzer angeben!') | [
"async",
"def",
"rmrank",
"(",
"self",
",",
"ctx",
",",
"member",
":",
"discord",
".",
"Member",
"=",
"None",
",",
"*",
"rankName",
":",
"str",
")",
":",
"rank",
"=",
"discord",
".",
"utils",
".",
"get",
"(",
"ctx",
".",
"guild",
".",
"roles",
",",
"name",
"=",
"' '",
".",
"join",
"(",
"rankName",
")",
")",
"await",
"ctx",
".",
"message",
".",
"delete",
"(",
")",
"if",
"member",
"is",
"not",
"None",
":",
"await",
"member",
".",
"remove_roles",
"(",
"rank",
")",
"await",
"ctx",
".",
"send",
"(",
"f':white_check_mark: Teamler **{member.name}** wurde auf **{rank.name}** downranked'",
")",
"else",
":",
"await",
"ctx",
".",
"send",
"(",
"':no_entry: Du musst einen Benutzer angeben!'",
")"
] | [
330,
4
] | [
342,
73
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ImpfterminService.impfzentren_laden | (self) | return False | Laden aller Impfzentren zum Abgleich der eingegebenen PLZ.
:return: bool
| Laden aller Impfzentren zum Abgleich der eingegebenen PLZ. | def impfzentren_laden(self):
"""Laden aller Impfzentren zum Abgleich der eingegebenen PLZ.
:return: bool
"""
url = "https://www.impfterminservice.de/assets/static/impfzentren.json"
res = self.s.get(url, timeout=15)
if res.ok:
# Antwort-JSON umformatieren für einfachere Handhabung
formatierte_impfzentren = {}
for bundesland, impfzentren in res.json().items():
for impfzentrum in impfzentren:
formatierte_impfzentren[impfzentrum["PLZ"]] = impfzentrum
self.verfuegbare_impfzentren = formatierte_impfzentren
self.log.info(f"{len(self.verfuegbare_impfzentren)} Impfzentren verfügbar")
# Prüfen, ob Impfzentrum zur eingetragenen PLZ existiert
self.impfzentrum = self.verfuegbare_impfzentren.get(self.plz)
if self.impfzentrum:
self.domain = self.impfzentrum.get("URL")
self.log.info("'{}' in {} {} ausgewählt".format(
self.impfzentrum.get("Zentrumsname").strip(),
self.impfzentrum.get("PLZ"),
self.impfzentrum.get("Ort")))
return True
else:
self.log.error(f"Kein Impfzentrum in PLZ {self.plz} verfügbar")
else:
self.log.error("Impfzentren können nicht geladen werden")
return False | [
"def",
"impfzentren_laden",
"(",
"self",
")",
":",
"url",
"=",
"\"https://www.impfterminservice.de/assets/static/impfzentren.json\"",
"res",
"=",
"self",
".",
"s",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"15",
")",
"if",
"res",
".",
"ok",
":",
"# Antwort-JSON umformatieren für einfachere Handhabung",
"formatierte_impfzentren",
"=",
"{",
"}",
"for",
"bundesland",
",",
"impfzentren",
"in",
"res",
".",
"json",
"(",
")",
".",
"items",
"(",
")",
":",
"for",
"impfzentrum",
"in",
"impfzentren",
":",
"formatierte_impfzentren",
"[",
"impfzentrum",
"[",
"\"PLZ\"",
"]",
"]",
"=",
"impfzentrum",
"self",
".",
"verfuegbare_impfzentren",
"=",
"formatierte_impfzentren",
"self",
".",
"log",
".",
"info",
"(",
"f\"{len(self.verfuegbare_impfzentren)} Impfzentren verfügbar\")",
"",
"# Prüfen, ob Impfzentrum zur eingetragenen PLZ existiert",
"self",
".",
"impfzentrum",
"=",
"self",
".",
"verfuegbare_impfzentren",
".",
"get",
"(",
"self",
".",
"plz",
")",
"if",
"self",
".",
"impfzentrum",
":",
"self",
".",
"domain",
"=",
"self",
".",
"impfzentrum",
".",
"get",
"(",
"\"URL\"",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"'{}' in {} {} ausgewählt\".",
"f",
"ormat(",
"",
"self",
".",
"impfzentrum",
".",
"get",
"(",
"\"Zentrumsname\"",
")",
".",
"strip",
"(",
")",
",",
"self",
".",
"impfzentrum",
".",
"get",
"(",
"\"PLZ\"",
")",
",",
"self",
".",
"impfzentrum",
".",
"get",
"(",
"\"Ort\"",
")",
")",
")",
"return",
"True",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"f\"Kein Impfzentrum in PLZ {self.plz} verfügbar\")",
"",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Impfzentren können nicht geladen werden\")",
"",
"return",
"False"
] | [
78,
4
] | [
109,
20
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Geometry.copy_shape | (self,
center,
radius,
startangle,
endangle,
inner_radius,
outer_radius,
split=False,
rtol=0.0,
atol=0.0,
append_inner=False,
append_outer=False) | Die Funktion kopiert die Teile von Shape-Objekten, welche sich in
der durch die Parameter definierten Teilkreisfläche befinden.
| Die Funktion kopiert die Teile von Shape-Objekten, welche sich in
der durch die Parameter definierten Teilkreisfläche befinden.
| def copy_shape(self,
center,
radius,
startangle,
endangle,
inner_radius,
outer_radius,
split=False,
rtol=0.0,
atol=0.0,
append_inner=False,
append_outer=False):
""" Die Funktion kopiert die Teile von Shape-Objekten, welche sich in
der durch die Parameter definierten Teilkreisfläche befinden.
"""
logger.debug('copy_shape(%s, %s)', startangle, endangle)
if not rtol:
rtol = self.rtol
if not atol:
atol = self.atol
if is_same_angle(startangle, endangle):
start_line = Line(
Element(start=center,
end=point(center, radius+1, startangle)))
end_line = Line(
Element(start=center,
end=point(center, radius+1, startangle)))
else:
start_line = Line(
Element(start=center,
end=point(center, radius+1, startangle)))
end_line = Line(
Element(start=center,
end=point(center, radius+1, endangle)))
if np.isclose(normalise_angle(startangle),
normalise_angle(endangle), 0.0):
inner_circle = Circle(Element(center=center, radius=inner_radius))
outer_circle = Circle(Element(center=center, radius=outer_radius))
else:
inner_circle = Arc(Element(center=center, radius=inner_radius,
start_angle=startangle*180/np.pi,
end_angle=endangle*180/np.pi))
outer_circle = Arc(Element(center=center, radius=outer_radius,
start_angle=startangle*180/np.pi,
end_angle=endangle*180/np.pi))
new_elements = []
pts_inner = [] if append_inner else None
pts_outer = [] if append_outer else None
for e in self.elements(Shape):
if isinstance(e, Line):
new_elements += self.copy_line(
center, radius, startangle, endangle,
start_line, end_line,
inner_circle, outer_circle, e,
rtol=rtol,
atol=atol,
points_inner=pts_inner,
points_outer=pts_outer)
elif isinstance(e, Arc):
new_elements += self.copy_arc(
center, radius, startangle, endangle,
start_line, end_line,
inner_circle, outer_circle, e,
rtol=rtol,
atol=atol,
points_inner=pts_inner,
points_outer=pts_outer)
elif isinstance(e, Circle):
new_elements += self.copy_circle(
center, radius, startangle, endangle,
start_line, end_line,
inner_circle, outer_circle, e,
rtol=rtol,
atol=atol,
points_inner=pts_inner,
points_outer=pts_outer)
if pts_inner and len(pts_inner) > 1:
pts_inner.sort(reverse=True)
p1 = pts_inner[0]
for p2 in pts_inner[1:]:
start_angle = alpha_line(center, p1)
end_angle = alpha_line(center, p2)
arc = Arc(Element(center=center,
radius=inner_radius,
start_angle=start_angle*180/np.pi,
end_angle=end_angle*180/np.pi))
new_elements.append(arc)
p1 = p2
if pts_outer and len(pts_outer) > 1:
pts_outer.sort(reverse=True)
p1 = pts_outer[0]
for p2 in pts_outer[1:]:
start_angle = alpha_line(center, p1)
end_angle = alpha_line(center, p2)
arc = Arc(Element(center=center,
radius=outer_radius,
start_angle=start_angle*180/np.pi,
end_angle=end_angle*180/np.pi))
new_elements.append(arc)
p1 = p2
if split:
logger.debug('new Geometry with split')
return Geometry(new_elements, 0.05, 0.1, split=split)
else:
return Geometry(new_elements, self.rtol, self.atol) | [
"def",
"copy_shape",
"(",
"self",
",",
"center",
",",
"radius",
",",
"startangle",
",",
"endangle",
",",
"inner_radius",
",",
"outer_radius",
",",
"split",
"=",
"False",
",",
"rtol",
"=",
"0.0",
",",
"atol",
"=",
"0.0",
",",
"append_inner",
"=",
"False",
",",
"append_outer",
"=",
"False",
")",
":",
"logger",
".",
"debug",
"(",
"'copy_shape(%s, %s)'",
",",
"startangle",
",",
"endangle",
")",
"if",
"not",
"rtol",
":",
"rtol",
"=",
"self",
".",
"rtol",
"if",
"not",
"atol",
":",
"atol",
"=",
"self",
".",
"atol",
"if",
"is_same_angle",
"(",
"startangle",
",",
"endangle",
")",
":",
"start_line",
"=",
"Line",
"(",
"Element",
"(",
"start",
"=",
"center",
",",
"end",
"=",
"point",
"(",
"center",
",",
"radius",
"+",
"1",
",",
"startangle",
")",
")",
")",
"end_line",
"=",
"Line",
"(",
"Element",
"(",
"start",
"=",
"center",
",",
"end",
"=",
"point",
"(",
"center",
",",
"radius",
"+",
"1",
",",
"startangle",
")",
")",
")",
"else",
":",
"start_line",
"=",
"Line",
"(",
"Element",
"(",
"start",
"=",
"center",
",",
"end",
"=",
"point",
"(",
"center",
",",
"radius",
"+",
"1",
",",
"startangle",
")",
")",
")",
"end_line",
"=",
"Line",
"(",
"Element",
"(",
"start",
"=",
"center",
",",
"end",
"=",
"point",
"(",
"center",
",",
"radius",
"+",
"1",
",",
"endangle",
")",
")",
")",
"if",
"np",
".",
"isclose",
"(",
"normalise_angle",
"(",
"startangle",
")",
",",
"normalise_angle",
"(",
"endangle",
")",
",",
"0.0",
")",
":",
"inner_circle",
"=",
"Circle",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"inner_radius",
")",
")",
"outer_circle",
"=",
"Circle",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"outer_radius",
")",
")",
"else",
":",
"inner_circle",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"inner_radius",
",",
"start_angle",
"=",
"startangle",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"endangle",
"*",
"180",
"/",
"np",
".",
"pi",
")",
")",
"outer_circle",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"outer_radius",
",",
"start_angle",
"=",
"startangle",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"endangle",
"*",
"180",
"/",
"np",
".",
"pi",
")",
")",
"new_elements",
"=",
"[",
"]",
"pts_inner",
"=",
"[",
"]",
"if",
"append_inner",
"else",
"None",
"pts_outer",
"=",
"[",
"]",
"if",
"append_outer",
"else",
"None",
"for",
"e",
"in",
"self",
".",
"elements",
"(",
"Shape",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"Line",
")",
":",
"new_elements",
"+=",
"self",
".",
"copy_line",
"(",
"center",
",",
"radius",
",",
"startangle",
",",
"endangle",
",",
"start_line",
",",
"end_line",
",",
"inner_circle",
",",
"outer_circle",
",",
"e",
",",
"rtol",
"=",
"rtol",
",",
"atol",
"=",
"atol",
",",
"points_inner",
"=",
"pts_inner",
",",
"points_outer",
"=",
"pts_outer",
")",
"elif",
"isinstance",
"(",
"e",
",",
"Arc",
")",
":",
"new_elements",
"+=",
"self",
".",
"copy_arc",
"(",
"center",
",",
"radius",
",",
"startangle",
",",
"endangle",
",",
"start_line",
",",
"end_line",
",",
"inner_circle",
",",
"outer_circle",
",",
"e",
",",
"rtol",
"=",
"rtol",
",",
"atol",
"=",
"atol",
",",
"points_inner",
"=",
"pts_inner",
",",
"points_outer",
"=",
"pts_outer",
")",
"elif",
"isinstance",
"(",
"e",
",",
"Circle",
")",
":",
"new_elements",
"+=",
"self",
".",
"copy_circle",
"(",
"center",
",",
"radius",
",",
"startangle",
",",
"endangle",
",",
"start_line",
",",
"end_line",
",",
"inner_circle",
",",
"outer_circle",
",",
"e",
",",
"rtol",
"=",
"rtol",
",",
"atol",
"=",
"atol",
",",
"points_inner",
"=",
"pts_inner",
",",
"points_outer",
"=",
"pts_outer",
")",
"if",
"pts_inner",
"and",
"len",
"(",
"pts_inner",
")",
">",
"1",
":",
"pts_inner",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"p1",
"=",
"pts_inner",
"[",
"0",
"]",
"for",
"p2",
"in",
"pts_inner",
"[",
"1",
":",
"]",
":",
"start_angle",
"=",
"alpha_line",
"(",
"center",
",",
"p1",
")",
"end_angle",
"=",
"alpha_line",
"(",
"center",
",",
"p2",
")",
"arc",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"inner_radius",
",",
"start_angle",
"=",
"start_angle",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"end_angle",
"*",
"180",
"/",
"np",
".",
"pi",
")",
")",
"new_elements",
".",
"append",
"(",
"arc",
")",
"p1",
"=",
"p2",
"if",
"pts_outer",
"and",
"len",
"(",
"pts_outer",
")",
">",
"1",
":",
"pts_outer",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"p1",
"=",
"pts_outer",
"[",
"0",
"]",
"for",
"p2",
"in",
"pts_outer",
"[",
"1",
":",
"]",
":",
"start_angle",
"=",
"alpha_line",
"(",
"center",
",",
"p1",
")",
"end_angle",
"=",
"alpha_line",
"(",
"center",
",",
"p2",
")",
"arc",
"=",
"Arc",
"(",
"Element",
"(",
"center",
"=",
"center",
",",
"radius",
"=",
"outer_radius",
",",
"start_angle",
"=",
"start_angle",
"*",
"180",
"/",
"np",
".",
"pi",
",",
"end_angle",
"=",
"end_angle",
"*",
"180",
"/",
"np",
".",
"pi",
")",
")",
"new_elements",
".",
"append",
"(",
"arc",
")",
"p1",
"=",
"p2",
"if",
"split",
":",
"logger",
".",
"debug",
"(",
"'new Geometry with split'",
")",
"return",
"Geometry",
"(",
"new_elements",
",",
"0.05",
",",
"0.1",
",",
"split",
"=",
"split",
")",
"else",
":",
"return",
"Geometry",
"(",
"new_elements",
",",
"self",
".",
"rtol",
",",
"self",
".",
"atol",
")"
] | [
1819,
4
] | [
1933,
63
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
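
A reduced sketch of the selection criterion copy_shape applies per element: a point belongs to the sector if its angle lies between startangle and endangle and its radius between inner and outer radius. Tolerances, line/arc intersection and the project's Element classes are omitted; this is only the geometric idea.
import math

def in_sector(p, center, startangle, endangle, inner_radius, outer_radius):
    dx, dy = p[0] - center[0], p[1] - center[1]
    r = math.hypot(dx, dy)
    a = math.atan2(dy, dx) % (2 * math.pi)
    start, end = startangle % (2 * math.pi), endangle % (2 * math.pi)
    in_angle = start <= a <= end if start <= end else (a >= start or a <= end)
    return inner_radius <= r <= outer_radius and in_angle

print(in_sector((1, 1), (0, 0), 0.0, math.pi / 2, 0.5, 2.0))   # True
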
|
WebAppTest.test_mlc_MT_LeafSpeed | (self) | Jahrestest - JT_LeafSpeed - Geschwindigkeit der Lamellen DIN 6875-3, Teil 4.2.5 (Variationen von Dl, Gantry und Kollimator)
| Jahrestest - JT_LeafSpeed - Geschwindigkeit der Lamellen DIN 6875-3, Teil 4.2.5 (Variationen von Dl, Gantry und Kollimator)
| def test_mlc_MT_LeafSpeed(self):
''' Jahrestest - JT_LeafSpeed - Geschwindigkeit der Lamellen DIN 6875-3, Teil 4.2.5 (Variationen von Dl, Gantry und Kollimator)
'''
self.run_test( {
"testid": "MT-LeafSpeed",
"unit": "Linac-1",
"year": 2021,
"month": 1
} )
self.run_test( {
"testid": "MT-LeafSpeed",
"unit": "Linac-2",
"year": 2021,
"month": 1
} ) | [
"def",
"test_mlc_MT_LeafSpeed",
"(",
"self",
")",
":",
"self",
".",
"run_test",
"(",
"{",
"\"testid\"",
":",
"\"MT-LeafSpeed\"",
",",
"\"unit\"",
":",
"\"Linac-1\"",
",",
"\"year\"",
":",
"2021",
",",
"\"month\"",
":",
"1",
"}",
")",
"self",
".",
"run_test",
"(",
"{",
"\"testid\"",
":",
"\"MT-LeafSpeed\"",
",",
"\"unit\"",
":",
"\"Linac-2\"",
",",
"\"year\"",
":",
"2021",
",",
"\"month\"",
":",
"1",
"}",
")"
] | [
483,
4
] | [
499,
11
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispConfig.mqttGetHandler | (self) | return mqtthdlr | Bestimmt den mqtt Handler wenn er initialisiert wurde.
Returns
-------
mqtthdlr.
| Bestimmt den mqtt Handler wenn er initialisiert wurde. | def mqttGetHandler(self):
"""Bestimmt den mqtt Handler wenn er initialisiert wurde.
Returns
-------
mqtthdlr.
"""
mqtthdlr = None
# gibt es noch keinen logger in self._mqtthdlr, dann über logging bestimmen
if self._mqtthdlr:
mqtthdlr = self._mqtthdlr
else:
logger = logging.getLogger( "MQTT" )
if hasattr(logger, '_mqtthdlr'):
mqtthdlr = logger._mqtthdlr
# Handler zurückgeben
return mqtthdlr | [
"def",
"mqttGetHandler",
"(",
"self",
")",
":",
"mqtthdlr",
"=",
"None",
"# gibt es noch keinen logger in self._mqtthdlr, dann über logging bestimmen",
"if",
"self",
".",
"_mqtthdlr",
":",
"mqtthdlr",
"=",
"self",
".",
"_mqtthdlr",
"else",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"\"MQTT\"",
")",
"if",
"hasattr",
"(",
"logger",
",",
"'_mqtthdlr'",
")",
":",
"mqtthdlr",
"=",
"logger",
".",
"_mqtthdlr",
"# Handler zurückgeben",
"return",
"mqtthdlr"
] | [
735,
4
] | [
752,
23
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
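
A hedged sketch of the lookup pattern above: a handler object is stashed as an attribute on a named logger so that code without a direct reference can recover it later. DummyHandler and the attribute value are illustrative stand-ins, not the project's real MQTT handler.
import logging

class DummyHandler:            # stand-in for the real MQTT handler object
    pass

logger = logging.getLogger("MQTT")
logger._mqtthdlr = DummyHandler()        # attached once during initialisation
# later, mqttGetHandler() falls back to exactly this lookup
mqtthdlr = getattr(logging.getLogger("MQTT"), "_mqtthdlr", None)
assert isinstance(mqtthdlr, DummyHandler)
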
StepData.loop_key | (self, keys: list, values: dict) | return map(lambda value: (value[0], self.save_loop_key(value[1], values)), enumerate(keys)) | Zum durchlaufen eines Key-Arrays.
Setzt bei jedem Durchlauf die Variable `_key`.
`_key` entspricht dem aktuellen Wert des Arrays.
:param keys: Array mit Keys (Strings)
:param values: Werte aus der JSON-Datei
:return: Iterator über das Dictionary, welcher Seiteneffekte besitzt, mit (idx, key).
:rtype: map
| Zum durchlaufen eines Key-Arrays. | def loop_key(self, keys: list, values: dict):
""" Zum durchlaufen eines Key-Arrays.
Setzt bei jedem Durchlauf die Variable `_key`.
`_key` entspricht dem aktuellen Wert des Arrays.
:param keys: Array mit Keys (Strings)
:param values: Werte aus der JSON-Datei
:return: Iterator über das Dictionary, welcher Seiteneffekte besitzt, mit (idx, key).
:rtype: map
"""
return map(lambda value: (value[0], self.save_loop_key(value[1], values)), enumerate(keys)) | [
"def",
"loop_key",
"(",
"self",
",",
"keys",
":",
"list",
",",
"values",
":",
"dict",
")",
":",
"return",
"map",
"(",
"lambda",
"value",
":",
"(",
"value",
"[",
"0",
"]",
",",
"self",
".",
"save_loop_key",
"(",
"value",
"[",
"1",
"]",
",",
"values",
")",
")",
",",
"enumerate",
"(",
"keys",
")",
")"
] | [
86,
4
] | [
97,
99
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
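
Illustrative sketch (not from the source project) of the iterator contract described above: the keys are enumerated while a side-effecting callback records the current key; MiniStepData and save_loop_key are simplified stand-ins for the real StepData machinery.
class MiniStepData:
    """Simplified stand-in for StepData, only to show the (idx, key) iteration."""
    def __init__(self):
        self.current_key = None
    def save_loop_key(self, key, values):
        self.current_key = key           # side effect: remember the active `_key`
        return key
    def loop_key(self, keys, values):
        return map(lambda v: (v[0], self.save_loop_key(v[1], values)), enumerate(keys))

data = MiniStepData()
for idx, key in data.loop_key(["temp", "humidity"], {}):
    print(idx, key, data.current_key)    # current_key follows the loop
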
DicomImage.getRoi | ( self, field=None ) | return self.array[ da["Y1"]:da["Y2"], da["X1"]:da["X2"] ] | holt region of interest des angegebenen Bereichs aus image.array
| holt region of interest des angegebenen Bereichs aus image.array
| def getRoi( self, field=None ):
""" holt region of interest des angegebenen Bereichs aus image.array
"""
da = self.getFieldDots( field )
return self.array[ da["Y1"]:da["Y2"], da["X1"]:da["X2"] ] | [
"def",
"getRoi",
"(",
"self",
",",
"field",
"=",
"None",
")",
":",
"da",
"=",
"self",
".",
"getFieldDots",
"(",
"field",
")",
"return",
"self",
".",
"array",
"[",
"da",
"[",
"\"Y1\"",
"]",
":",
"da",
"[",
"\"Y2\"",
"]",
",",
"da",
"[",
"\"X1\"",
"]",
":",
"da",
"[",
"\"X2\"",
"]",
"]"
] | [
628,
4
] | [
633,
65
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
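
A self-contained sketch of the ROI slicing idea: given pixel coordinates of a field, the region of interest is a plain NumPy slice. The coordinate dict mirrors what getFieldDots() is assumed to return; the numbers are invented.
import numpy as np

image_array = np.arange(100).reshape(10, 10)        # toy image
da = {"X1": 2, "X2": 5, "Y1": 3, "Y2": 7}           # assumed getFieldDots() result
roi = image_array[da["Y1"]:da["Y2"], da["X1"]:da["X2"]]
print(roi.shape)    # (4, 3): rows are Y, columns are X
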
ImpfterminService.login | (self) | Einloggen mittels Code, um qualifizierte Impfstoffe zu erhalten.
Dieser Schritt ist wahrscheinlich nicht zwingend notwendig, aber schadet auch nicht.
:return: bool
| Einloggen mittels Code, um qualifizierte Impfstoffe zu erhalten.
Dieser Schritt ist wahrscheinlich nicht zwingend notwendig, aber schadet auch nicht. | def login(self):
"""Einloggen mittels Code, um qualifizierte Impfstoffe zu erhalten.
Dieser Schritt ist wahrscheinlich nicht zwingend notwendig, aber schadet auch nicht.
:return: bool
"""
path = f"rest/login?plz={choice(self.plz_impfzentren)}"
res = self.s.get(self.domain + path, timeout=15)
if res.ok:
# Checken, welche Impfstoffe für das Alter zur Verfügung stehen
self.qualifikationen = res.json().get("qualifikationen")
if self.qualifikationen:
zugewiesene_impfstoffe = set()
for q in self.qualifikationen:
for verfuegbare_q in self.verfuegbare_qualifikationen:
if verfuegbare_q["qualification"] == q:
zugewiesene_impfstoffe.update(verfuegbare_q["impfstoffe"])
self.log.info("Erfolgreich mit Code eingeloggt")
self.log.info(f"Mögliche Impfstoffe: {list(zugewiesene_impfstoffe)}")
print(" ")
return True
else:
self.log.warn("Keine qualifizierten Impfstoffe verfügbar")
else:
return False | [
"def",
"login",
"(",
"self",
")",
":",
"path",
"=",
"f\"rest/login?plz={choice(self.plz_impfzentren)}\"",
"res",
"=",
"self",
".",
"s",
".",
"get",
"(",
"self",
".",
"domain",
"+",
"path",
",",
"timeout",
"=",
"15",
")",
"if",
"res",
".",
"ok",
":",
"# Checken, welche Impfstoffe für das Alter zur Verfügung stehen",
"self",
".",
"qualifikationen",
"=",
"res",
".",
"json",
"(",
")",
".",
"get",
"(",
"\"qualifikationen\"",
")",
"if",
"self",
".",
"qualifikationen",
":",
"zugewiesene_impfstoffe",
"=",
"set",
"(",
")",
"for",
"q",
"in",
"self",
".",
"qualifikationen",
":",
"for",
"verfuegbare_q",
"in",
"self",
".",
"verfuegbare_qualifikationen",
":",
"if",
"verfuegbare_q",
"[",
"\"qualification\"",
"]",
"==",
"q",
":",
"zugewiesene_impfstoffe",
".",
"update",
"(",
"verfuegbare_q",
"[",
"\"impfstoffe\"",
"]",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Erfolgreich mit Code eingeloggt\"",
")",
"self",
".",
"log",
".",
"info",
"(",
"f\"Mögliche Impfstoffe: {list(zugewiesene_impfstoffe)}\")",
"",
"print",
"(",
"\" \"",
")",
"return",
"True",
"else",
":",
"self",
".",
"log",
".",
"warn",
"(",
"\"Keine qualifizierten Impfstoffe verfügbar\")",
"",
"else",
":",
"return",
"False"
] | [
523,
4
] | [
553,
24
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
checkMlc.doMT_8_02_4 | (self, fileData ) | return self._doMLC_VMAT( fileData, overrideMD=md, withOffsets=False ) | Lamellenpositioniergenauigkeit
Picket Fence Test 1 mm Schlitzbreite,
Plan/Feld:
Monatstest/MLC 4 neu
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| Lamellenpositioniergenauigkeit
Picket Fence Test 1 mm Schlitzbreite, | def doMT_8_02_4(self, fileData ):
"""Lamellenpositioniergenauigkeit
Picket Fence Test 1 mm Schlitzbreite,
Plan/Feld:
Monatstest/MLC 4 neu
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
md = {
"_imgSize" : {"width" : 45, "height" : 55},
"_imgField": {"border": 10 },
"_chartSize" : { "width" : 90, "height" : 55}
}
return self._doMLC_VMAT( fileData, overrideMD=md, withOffsets=False ) | [
"def",
"doMT_8_02_4",
"(",
"self",
",",
"fileData",
")",
":",
"md",
"=",
"{",
"\"_imgSize\"",
":",
"{",
"\"width\"",
":",
"45",
",",
"\"height\"",
":",
"55",
"}",
",",
"\"_imgField\"",
":",
"{",
"\"border\"",
":",
"10",
"}",
",",
"\"_chartSize\"",
":",
"{",
"\"width\"",
":",
"90",
",",
"\"height\"",
":",
"55",
"}",
"}",
"return",
"self",
".",
"_doMLC_VMAT",
"(",
"fileData",
",",
"overrideMD",
"=",
"md",
",",
"withOffsets",
"=",
"False",
")"
] | [
2485,
4
] | [
2509,
77
] | null | python | de | ['de', 'de', 'de'] | False | true | null |
GeneratorGUI.__init__ | (self, hauptfenster) | Konstruktor, in dem das GUI des Funktionsgenerator aufgebaut wird | Konstruktor, in dem das GUI des Funktionsgenerator aufgebaut wird | def __init__(self, hauptfenster):
""" Konstruktor, in dem das GUI des Funktionsgenerator aufgebaut wird """
self.gen_gui_aktiv = True
self.hauptfenster = hauptfenster
self.controller = GeneratorController()
# Top-Level Fenster für Funktionsgenerator anlegen
self.generatorfenster = Toplevel(self.hauptfenster)
self.generatorfenster.iconbitmap("simstb.ico")
self.generatorfenster.title("SimSTB - Funktionsgenerator")
self.generatorfenster.protocol("WM_DELETE_WINDOW", lambda: self.schliessen())
# Hauptrahmen anlegen
hauptrahmen = ttk.Frame(master=self.generatorfenster, padding="5", style="Haupt.TFrame")
hauptrahmen.grid(column=1, row=1, sticky="NWES")
self.generatorfenster.columnconfigure(1, weight=1)
self.generatorfenster.rowconfigure(1, weight=1)
# Titelbereich einfügen
ttk.Label(master=hauptrahmen, text="SimSTB", style="HauptLabel1.TLabel").grid(column=1, row=1, sticky="NW")
ttk.Label(master=hauptrahmen, text="Analoger\nFunktionsgenerator", style="HauptLabel2.TLabel").grid(column=2, row=1, sticky="NW")
# Unterrahmen 1 und Elemente für Einstellungen
unterrahmen1 = ttk.Frame(master=hauptrahmen, padding="5", style="Block.TFrame")
unterrahmen1.grid(column=1, row=3, columnspan=2, sticky="NWES")
ttk.Label(master=unterrahmen1, text="Funktionsgenerator Einstellungen", style="BlockLabel2.TLabel").grid(column=1, row=1, columnspan=5, sticky="NW")
ttk.Label(master=unterrahmen1, text="Kanal", style="BlockLabel.TLabel").grid(column=1, row=2, sticky="NW")
ttk.Label(master=unterrahmen1, text="An/Aus", style="BlockLabel.TLabel").grid(column=2, row=2, sticky="NW")
ttk.Label(master=unterrahmen1, text="Signalform", style="BlockLabel.TLabel").grid(column=3, row=2, sticky="NW")
ttk.Label(master=unterrahmen1, text="Amplitude", style="BlockLabel.TLabel").grid(column=4, row=2, sticky="NW")
ttk.Label(master=unterrahmen1, text="P.Dauer (s)", style="BlockLabel.TLabel").grid(column=5, row=2, sticky="NW")
self.aktiviert = []
self.signalform = []
self.amplitude = []
self.pdauer = []
for i in range(Konfig.ANAMAXLAENGE):
ttk.Label(master=unterrahmen1, text="AE"+str(i), style="BlockLabel.TLabel").grid(column=1, row=i+3, sticky="NW") #Kanal
eintrag_aktiviert = IntVar() #An/Aus
eintrag_aktiviert.set(0)
ttk.Checkbutton(master=unterrahmen1, variable=eintrag_aktiviert, style="BlockCheckbutton.TCheckbutton").grid(column=2, row=i+3, sticky="N")
self.aktiviert.append(eintrag_aktiviert)
eintrag_signalform = StringVar() # Signalform
eintrag_signalform.set(Konfig.SIGNALFORMEN[0])
ttk.Combobox(master=unterrahmen1, values=Konfig.SIGNALFORMEN, state="readonly", textvariable=eintrag_signalform).grid(column=3, row=i+3, sticky="NEW")
self.signalform.append(eintrag_signalform)
eintrag_amplitude = DoubleVar() # Amplitude
eintrag_amplitude.set(0)
ttk.Entry(master=unterrahmen1, textvariable=eintrag_amplitude).grid(column=4, row=i+3, sticky="NW")
self.amplitude.append(eintrag_amplitude)
eintrag_pdauer = DoubleVar() # Periodendauer
eintrag_pdauer.set(0)
ttk.Entry(master=unterrahmen1, textvariable=eintrag_pdauer).grid(column=5, row=i+3, sticky="NW")
self.pdauer.append(eintrag_pdauer)
# Unterrahmen 2 und Knöpfe für Steuerung
unterrahmen2 = ttk.Frame(master=hauptrahmen, padding="5", style="Block.TFrame")
unterrahmen2.grid(column=1, row=4, columnspan=2, sticky="NWES")
ttk.Label(master=unterrahmen2, text="Funktionsgenerator Steuerung", style="BlockLabel2.TLabel").grid(column=1, row=1, columnspan=2, sticky="NW")
ttk.Button(master=unterrahmen2, text="Starten", command=lambda: self.start()).grid(column=1, row=2, sticky="NW")
ttk.Button(master=unterrahmen2, text="Stoppen", command=lambda: self.stop()).grid(column=2, row=2, sticky="NW")
self.gen_status = StringVar()
self.gen_status.set( "Generator nicht aktiv")
ttk.Label(master=unterrahmen2, textvariable=self.gen_status, style="BlockStatusLabel.TLabel").grid(column=3, row=2, columnspan=2, sticky="NE")
# Globaler Knopf
ttk.Button(master=hauptrahmen, text="Schließen", command=lambda: self.schliessen()).grid(column=2, row=5, sticky="NE")
for element in hauptrahmen.winfo_children():
element.grid_configure(padx="10", pady="10")
for element in unterrahmen1.winfo_children():
element.grid_configure(padx="2", pady="2") | [
"def",
"__init__",
"(",
"self",
",",
"hauptfenster",
")",
":",
"self",
".",
"gen_gui_aktiv",
"=",
"True",
"self",
".",
"hauptfenster",
"=",
"hauptfenster",
"self",
".",
"controller",
"=",
"GeneratorController",
"(",
")",
"# Top-Level Fenster für Funktionsgenerator anlegen",
"self",
".",
"generatorfenster",
"=",
"Toplevel",
"(",
"self",
".",
"hauptfenster",
")",
"self",
".",
"generatorfenster",
".",
"iconbitmap",
"(",
"\"simstb.ico\"",
")",
"self",
".",
"generatorfenster",
".",
"title",
"(",
"\"SimSTB - Funktionsgenerator\"",
")",
"self",
".",
"generatorfenster",
".",
"protocol",
"(",
"\"WM_DELETE_WINDOW\"",
",",
"lambda",
":",
"self",
".",
"schliessen",
"(",
")",
")",
"# Hauptrahmen anlegen",
"hauptrahmen",
"=",
"ttk",
".",
"Frame",
"(",
"master",
"=",
"self",
".",
"generatorfenster",
",",
"padding",
"=",
"\"5\"",
",",
"style",
"=",
"\"Haupt.TFrame\"",
")",
"hauptrahmen",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"1",
",",
"sticky",
"=",
"\"NWES\"",
")",
"self",
".",
"generatorfenster",
".",
"columnconfigure",
"(",
"1",
",",
"weight",
"=",
"1",
")",
"self",
".",
"generatorfenster",
".",
"rowconfigure",
"(",
"1",
",",
"weight",
"=",
"1",
")",
"# Titelbereich einfügen",
"ttk",
".",
"Label",
"(",
"master",
"=",
"hauptrahmen",
",",
"text",
"=",
"\"SimSTB\"",
",",
"style",
"=",
"\"HauptLabel1.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"1",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"hauptrahmen",
",",
"text",
"=",
"\"Analoger\\nFunktionsgenerator\"",
",",
"style",
"=",
"\"HauptLabel2.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"2",
",",
"row",
"=",
"1",
",",
"sticky",
"=",
"\"NW\"",
")",
"# Unterrahmen 1 und Elemente für Einstellungen",
"unterrahmen1",
"=",
"ttk",
".",
"Frame",
"(",
"master",
"=",
"hauptrahmen",
",",
"padding",
"=",
"\"5\"",
",",
"style",
"=",
"\"Block.TFrame\"",
")",
"unterrahmen1",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"3",
",",
"columnspan",
"=",
"2",
",",
"sticky",
"=",
"\"NWES\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"Funktionsgenerator Einstellungen\"",
",",
"style",
"=",
"\"BlockLabel2.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"1",
",",
"columnspan",
"=",
"5",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"Kanal\"",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"An/Aus\"",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"2",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"Signalform\"",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"3",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"Amplitude\"",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"4",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"P.Dauer (s)\"",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"5",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"self",
".",
"aktiviert",
"=",
"[",
"]",
"self",
".",
"signalform",
"=",
"[",
"]",
"self",
".",
"amplitude",
"=",
"[",
"]",
"self",
".",
"pdauer",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"Konfig",
".",
"ANAMAXLAENGE",
")",
":",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen1",
",",
"text",
"=",
"\"AE\"",
"+",
"str",
"(",
"i",
")",
",",
"style",
"=",
"\"BlockLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"i",
"+",
"3",
",",
"sticky",
"=",
"\"NW\"",
")",
"#Kanal",
"eintrag_aktiviert",
"=",
"IntVar",
"(",
")",
"#An/Aus",
"eintrag_aktiviert",
".",
"set",
"(",
"0",
")",
"ttk",
".",
"Checkbutton",
"(",
"master",
"=",
"unterrahmen1",
",",
"variable",
"=",
"eintrag_aktiviert",
",",
"style",
"=",
"\"BlockCheckbutton.TCheckbutton\"",
")",
".",
"grid",
"(",
"column",
"=",
"2",
",",
"row",
"=",
"i",
"+",
"3",
",",
"sticky",
"=",
"\"N\"",
")",
"self",
".",
"aktiviert",
".",
"append",
"(",
"eintrag_aktiviert",
")",
"eintrag_signalform",
"=",
"StringVar",
"(",
")",
"# Signalform",
"eintrag_signalform",
".",
"set",
"(",
"Konfig",
".",
"SIGNALFORMEN",
"[",
"0",
"]",
")",
"ttk",
".",
"Combobox",
"(",
"master",
"=",
"unterrahmen1",
",",
"values",
"=",
"Konfig",
".",
"SIGNALFORMEN",
",",
"state",
"=",
"\"readonly\"",
",",
"textvariable",
"=",
"eintrag_signalform",
")",
".",
"grid",
"(",
"column",
"=",
"3",
",",
"row",
"=",
"i",
"+",
"3",
",",
"sticky",
"=",
"\"NEW\"",
")",
"self",
".",
"signalform",
".",
"append",
"(",
"eintrag_signalform",
")",
"eintrag_amplitude",
"=",
"DoubleVar",
"(",
")",
"# Amplitude",
"eintrag_amplitude",
".",
"set",
"(",
"0",
")",
"ttk",
".",
"Entry",
"(",
"master",
"=",
"unterrahmen1",
",",
"textvariable",
"=",
"eintrag_amplitude",
")",
".",
"grid",
"(",
"column",
"=",
"4",
",",
"row",
"=",
"i",
"+",
"3",
",",
"sticky",
"=",
"\"NW\"",
")",
"self",
".",
"amplitude",
".",
"append",
"(",
"eintrag_amplitude",
")",
"eintrag_pdauer",
"=",
"DoubleVar",
"(",
")",
"# Periodendauer",
"eintrag_pdauer",
".",
"set",
"(",
"0",
")",
"ttk",
".",
"Entry",
"(",
"master",
"=",
"unterrahmen1",
",",
"textvariable",
"=",
"eintrag_pdauer",
")",
".",
"grid",
"(",
"column",
"=",
"5",
",",
"row",
"=",
"i",
"+",
"3",
",",
"sticky",
"=",
"\"NW\"",
")",
"self",
".",
"pdauer",
".",
"append",
"(",
"eintrag_pdauer",
")",
"# Unterrahmen 2 und Knöpfe für Steuerung",
"unterrahmen2",
"=",
"ttk",
".",
"Frame",
"(",
"master",
"=",
"hauptrahmen",
",",
"padding",
"=",
"\"5\"",
",",
"style",
"=",
"\"Block.TFrame\"",
")",
"unterrahmen2",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"4",
",",
"columnspan",
"=",
"2",
",",
"sticky",
"=",
"\"NWES\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen2",
",",
"text",
"=",
"\"Funktionsgenerator Steuerung\"",
",",
"style",
"=",
"\"BlockLabel2.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"1",
",",
"columnspan",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Button",
"(",
"master",
"=",
"unterrahmen2",
",",
"text",
"=",
"\"Starten\"",
",",
"command",
"=",
"lambda",
":",
"self",
".",
"start",
"(",
")",
")",
".",
"grid",
"(",
"column",
"=",
"1",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"ttk",
".",
"Button",
"(",
"master",
"=",
"unterrahmen2",
",",
"text",
"=",
"\"Stoppen\"",
",",
"command",
"=",
"lambda",
":",
"self",
".",
"stop",
"(",
")",
")",
".",
"grid",
"(",
"column",
"=",
"2",
",",
"row",
"=",
"2",
",",
"sticky",
"=",
"\"NW\"",
")",
"self",
".",
"gen_status",
"=",
"StringVar",
"(",
")",
"self",
".",
"gen_status",
".",
"set",
"(",
"\"Generator nicht aktiv\"",
")",
"ttk",
".",
"Label",
"(",
"master",
"=",
"unterrahmen2",
",",
"textvariable",
"=",
"self",
".",
"gen_status",
",",
"style",
"=",
"\"BlockStatusLabel.TLabel\"",
")",
".",
"grid",
"(",
"column",
"=",
"3",
",",
"row",
"=",
"2",
",",
"columnspan",
"=",
"2",
",",
"sticky",
"=",
"\"NE\"",
")",
"# Globaler Knopf",
"ttk",
".",
"Button",
"(",
"master",
"=",
"hauptrahmen",
",",
"text",
"=",
"\"Schließen\",",
" ",
"ommand=",
"l",
"ambda:",
" ",
"elf.",
"s",
"chliessen(",
")",
")",
".",
"g",
"rid(",
"c",
"olumn=",
"2",
",",
" ",
"ow=",
"5",
",",
" ",
"ticky=",
"\"",
"NE\")",
"",
"for",
"element",
"in",
"hauptrahmen",
".",
"winfo_children",
"(",
")",
":",
"element",
".",
"grid_configure",
"(",
"padx",
"=",
"\"10\"",
",",
"pady",
"=",
"\"10\"",
")",
"for",
"element",
"in",
"unterrahmen1",
".",
"winfo_children",
"(",
")",
":",
"element",
".",
"grid_configure",
"(",
"padx",
"=",
"\"2\"",
",",
"pady",
"=",
"\"2\"",
")"
] | [
33,
4
] | [
103,
54
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
get_infoprovider_path | (path: str) | return get_resource_path(os.path.join(INFOPROVIDER_LOCATION, path)) | Erstellt einen absoluten Pfad zur übergebenen Infoprovider-Ressource.
Der Pfad wird dabei aus `RESOURCES_LOCATION`, `INFOPROVIDER_LOCATION` und dem übergebenen Pfad erstellt.
:param path: Pfad zum benötigten Infoprovider, relativ zum `resources/infoprovider`-Ordner.
:return: Absoluter Pfad zum übergebenen Infoprovider.
| Erstellt einen absoluten Pfad zur übergebenen Infoprovider-Ressource. | def get_infoprovider_path(path: str):
"""Erstellt einen absoluten Pfad zur übergebenen Infoprovider-Ressource.
Der Pfad wird dabei aus `RESOURCES_LOCATION`, `INFOPROVIDER_LOCATION` und dem übergebenen Pfad erstellt.
:param path: Pfad zum benötigten Infoprovider, relativ zum `resources/infoprovider`-Ordner.
:return: Absoluter Pfad zum übergebenen Infoprovider.
"""
return get_resource_path(os.path.join(INFOPROVIDER_LOCATION, path)) | [
"def",
"get_infoprovider_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"INFOPROVIDER_LOCATION",
",",
"path",
")",
")"
] | [
87,
0
] | [
95,
71
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
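
A sketch of the path-building convention this record describes, with placeholder constants; the real values of RESOURCES_LOCATION and INFOPROVIDER_LOCATION come from the project's configuration and are assumed here.
import os

RESOURCES_LOCATION = "resources"            # placeholder values for illustration
INFOPROVIDER_LOCATION = "infoprovider"

def get_resource_path(path: str) -> str:
    return os.path.abspath(os.path.join(RESOURCES_LOCATION, path))

def get_infoprovider_path(path: str) -> str:
    return get_resource_path(os.path.join(INFOPROVIDER_LOCATION, path))

print(get_infoprovider_path("provider1.json"))   # .../resources/infoprovider/provider1.json
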
Line.split | (self, points, rtol=1e-03, atol=1e-03) | return [] | Die Funktion splittet das Line-Objekt an den vorgegebenen Punkten
und gibt eine Liste der neu entstandenen Elemente aus.
| Die Funktion splittet das Line-Objekt an den vorgegebenen Punkten
und gibt eine Liste der neu entstandenen Elemente aus.
| def split(self, points, rtol=1e-03, atol=1e-03):
""" Die Funktion splittet das Line-Objekt an den vorgegebenen Punkten
und gibt eine Liste der neu entstandenen Elemente aus.
"""
points_inside = [(distance(p, self.p1), p)
for p in points if self.is_point_inside(p,
rtol, atol,
False)]
if len(points_inside) > 0:
points_inside.append((0.0, self.p1))
points_inside.append((distance(self.p1, self.p2), self.p2))
points_inside.sort()
split_lines = []
p_start = None
for d, p in points_inside:
if p_start is not None:
split_lines.append(Line(Element(start=p_start, end=p)))
p_start = p
return split_lines
return [] | [
"def",
"split",
"(",
"self",
",",
"points",
",",
"rtol",
"=",
"1e-03",
",",
"atol",
"=",
"1e-03",
")",
":",
"points_inside",
"=",
"[",
"(",
"distance",
"(",
"p",
",",
"self",
".",
"p1",
")",
",",
"p",
")",
"for",
"p",
"in",
"points",
"if",
"self",
".",
"is_point_inside",
"(",
"p",
",",
"rtol",
",",
"atol",
",",
"False",
")",
"]",
"if",
"len",
"(",
"points_inside",
")",
">",
"0",
":",
"points_inside",
".",
"append",
"(",
"(",
"0.0",
",",
"self",
".",
"p1",
")",
")",
"points_inside",
".",
"append",
"(",
"(",
"distance",
"(",
"self",
".",
"p1",
",",
"self",
".",
"p2",
")",
",",
"self",
".",
"p2",
")",
")",
"points_inside",
".",
"sort",
"(",
")",
"split_lines",
"=",
"[",
"]",
"p_start",
"=",
"None",
"for",
"d",
",",
"p",
"in",
"points_inside",
":",
"if",
"p_start",
"is",
"not",
"None",
":",
"split_lines",
".",
"append",
"(",
"Line",
"(",
"Element",
"(",
"start",
"=",
"p_start",
",",
"end",
"=",
"p",
")",
")",
")",
"p_start",
"=",
"p",
"return",
"split_lines",
"return",
"[",
"]"
] | [
1078,
4
] | [
1100,
17
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
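
Minimal worked example of the splitting idea: points on the segment are ordered by distance from the start point and consecutive pairs become new segments. Plain tuples stand in for the project's Line/Element classes, and the tolerance handling of the original is omitted.
import math

def split_segment(p1, p2, points):
    """Split the segment p1->p2 at the given points (assumed to lie on it)."""
    def dist(a, b):
        return math.hypot(b[0] - a[0], b[1] - a[1])
    ordered = sorted(points + [p1, p2], key=lambda p: dist(p1, p))
    return list(zip(ordered[:-1], ordered[1:]))

print(split_segment((0, 0), (4, 0), [(1, 0), (3, 0)]))
# [((0, 0), (1, 0)), ((1, 0), (3, 0)), ((3, 0), (4, 0))]
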
plotImage.dots2mm_X | ( self, dots ) | return ( dots - self.cax.x ) / self.dpmm | Wandelt eine X-Dot-Angabe in die mm-Position des Image um.
Parameters
----------
dots : int
Position in Dots.
Returns
-------
float
Umgewandelte Position
| Wandelt eine X-Dot-Angabe in die mm-Position des Image um.
Parameters
----------
dots : int
Position in Dots. | def dots2mm_X( self, dots ):
"""Wandelt eine X dot Angabe im mm Position des Image um.
Parameters
----------
dots : int
Position in Dots.
Returns
-------
float
Umgewandelte Position
"""
return ( dots - self.cax.x ) / self.dpmm | [
"def",
"dots2mm_X",
"(",
"self",
",",
"dots",
")",
":",
"return",
"(",
"dots",
"-",
"self",
".",
"cax",
".",
"x",
")",
"/",
"self",
".",
"dpmm"
] | [
77,
4
] | [
91,
48
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
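
Worked arithmetic for the conversion above, with assumed numbers: at 2.0 dots per mm and a CAX at pixel 512, pixel 532 lies 10 mm from the central axis.
dpmm = 2.0        # assumed detector resolution (dots per mm)
cax_x = 512       # assumed CAX pixel position
dots = 532
mm = (dots - cax_x) / dpmm
print(mm)         # 10.0
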
get_datasource_path | (path: str) | return get_resource_path(os.path.join(DATASOURCES_LOCATION, path)) | Erstellt einen absoluten Pfad zu der übergebenen Datasource-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION`, `DATASOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources/datasources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
| Erstellt einen absoluten Pfad zu der übergebenen Datasource-Ressource. | def get_datasource_path(path: str):
"""Erstellt einen absoluten Pfad zu der übergebenen Datasource-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION`, `DATASOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources/datasources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
"""
return get_resource_path(os.path.join(DATASOURCES_LOCATION, path)) | [
"def",
"get_datasource_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"DATASOURCES_LOCATION",
",",
"path",
")",
")"
] | [
120,
0
] | [
128,
70
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
checkMlc.doMT_VMAT_1_2 | ( self, fileData ) | return self._doMLC_VMAT( fileData, passedOn=False ) | PicketFence mit rot und absichtlichen Fehler
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| PicketFence mit rot und absichtlichen Fehler | def doMT_VMAT_1_2( self, fileData ):
"""PicketFence mit rot und absichtlichen Fehler
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
return self._doMLC_VMAT( fileData, passedOn=False ) | [
"def",
"doMT_VMAT_1_2",
"(",
"self",
",",
"fileData",
")",
":",
"return",
"self",
".",
"_doMLC_VMAT",
"(",
"fileData",
",",
"passedOn",
"=",
"False",
")"
] | [
2542,
4
] | [
2557,
59
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
register_overlay | (func) | return register_type_func(OVERLAY_TYPES, ImageError, func) | Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary OVERLAY_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
| Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary OVERLAY_TYPES hinzu. | def register_overlay(func):
"""Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block.
Fügt eine Typ-Funktion dem Dictionary OVERLAY_TYPES hinzu.
:param func: die zu registrierende Funktion
:return: Funktion mit try/except-Block
"""
return register_type_func(OVERLAY_TYPES, ImageError, func) | [
"def",
"register_overlay",
"(",
"func",
")",
":",
"return",
"register_type_func",
"(",
"OVERLAY_TYPES",
",",
"ImageError",
",",
"func",
")"
] | [
18,
0
] | [
25,
62
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
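
Sketch of the registration pattern: register_type_func is assumed to wrap the function in a try/except block and file it in the given dict (the key and the exact wrapping are assumptions, not the project's verified implementation).
OVERLAY_TYPES = {}

class ImageError(Exception):
    pass

def register_type_func(registry, error_cls, func):
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:                  # re-raise as the domain-specific error
            raise error_cls(str(exc)) from exc
    registry[func.__name__] = wrapped
    return wrapped

def register_overlay(func):
    return register_type_func(OVERLAY_TYPES, ImageError, func)

@register_overlay
def my_overlay(image):
    return image

print("my_overlay" in OVERLAY_TYPES)              # True
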
ispBaseWebApp.routeDocs | ( self, filepath:str="" ) | return self.routeFile( filepath, osp.join( docs_root, "build" ) ) | Die Dokumentation anzeigen oder erzeugen.
Aufruf::
/docs/index.html - Dokumentation anzeigen
/docs - Dokumentation im iframe anzeigen. Mit src="docs/index.html"
/docs/build - Dokumentation erzeugen
/docs/rebuild - Dokumentation komplett erneuern (ui-docs)
| Die Dokumentation anzeigen oder erzeugen. | def routeDocs( self, filepath:str="" ):
"""Die Dokumentation anzeigen oder erzeugen.
Aufruf::
/docs/index.html - Dokumentation anzeigen
/docs - Dokumentation im iframe anzeigen. Mit src="docs/index.html"
/docs/build - Dokumentation erzeugen
/docs/rebuild - Dokumentation komplett erneuern (ui-docs)
"""
# wenn nur docs angegeben wurde iframe erstellen
if len(filepath) == 4:
return '<div class="iframe-container overflow-hidden flex-1"><iframe src="/docs/index.html" ></iframe></div>'
# Ausführungspfad für docs festlegen
docs_root = osp.join( self._config.get( "BASE_DIR", "") , '.docs' )
docs_path = docs_root
# docs/ nicht verwenden
filepath = filepath[5:]
# prüfen ob es docs_path gibt, sonst zuerst die dokumentation erzeugen
if not osp.isdir( docs_path ) or not osp.isfile( osp.join( docs_path, "build", "index.html" ) ): # pragma: no cover
filepath = "build"
if filepath == "build" or filepath == "rebuild": # pragma: no cover
# Dokumentation erzeugen filepath als mode mitgeben
if not self.createDocs( docs_path, filepath ):
return "<h1>Eine Dokumentation ist nicht vorhanden.</h1>"
filepath = "index.html"
return self.routeFile( filepath, osp.join( docs_root, "build" ) ) | [
"def",
"routeDocs",
"(",
"self",
",",
"filepath",
":",
"str",
"=",
"\"\"",
")",
":",
"# wenn nur docs angegeben wurde iframe erstellen",
"if",
"len",
"(",
"filepath",
")",
"==",
"4",
":",
"return",
"'<div class=\"iframe-container overflow-hidden flex-1\"><iframe src=\"/docs/index.html\" ></iframe></div>'",
"# Ausführungspfad für docs festlegen",
"docs_root",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"_config",
".",
"get",
"(",
"\"BASE_DIR\"",
",",
"\"\"",
")",
",",
"'.docs'",
")",
"docs_path",
"=",
"docs_root",
"# docs/ nicht verwenden",
"filepath",
"=",
"filepath",
"[",
"5",
":",
"]",
"# prüfen ob es docs_path gibt, sonst zuerst die dokumentation erzeugen",
"if",
"not",
"osp",
".",
"isdir",
"(",
"docs_path",
")",
"or",
"not",
"osp",
".",
"isfile",
"(",
"osp",
".",
"join",
"(",
"docs_path",
",",
"\"build\"",
",",
"\"index.html\"",
")",
")",
":",
"# pragma: no cover",
"filepath",
"=",
"\"build\"",
"if",
"filepath",
"==",
"\"build\"",
"or",
"filepath",
"==",
"\"rebuild\"",
":",
"# pragma: no cover",
"# Dokumentation erzeugen filepath als mode mitgeben",
"if",
"not",
"self",
".",
"createDocs",
"(",
"docs_path",
",",
"filepath",
")",
":",
"return",
"\"<h1>Eine Dokumentation ist nicht vorhanden.</h1>\"",
"filepath",
"=",
"\"index.html\"",
"return",
"self",
".",
"routeFile",
"(",
"filepath",
",",
"osp",
".",
"join",
"(",
"docs_root",
",",
"\"build\"",
")",
")"
] | [
758,
4
] | [
790,
73
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
RawData.Subscription_Handler | ( self, data ) | USERNOTICE - User hat einen Sub erhalten oder selbst gekauft
#########################################################################
VERFÜGBARE TAGS (Sub):
#########################################################################
- msg-id > Typ der Nachricht ("sub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
#########################################################################
VERFÜGBARE TAGS (Sub-Gift):
#########################################################################
- msg-id > Typ der Nachricht ("subgift")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
- msg-param-recipient-id > UserID des Empfängers
- msg-param-recipient-user-name > Username des Empfängers
- msg-param-recipient-display-name > DisplayName des Empfängers
#########################################################################
VERFÜGBARE TAGS (Resub):
#########################################################################
- msg-id > Typ der Nachricht ("resub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
| USERNOTICE - User hat einen Sub erhalten oder selbst gekauft
#########################################################################
VERFÜGBARE TAGS (Sub):
#########################################################################
- msg-id > Typ der Nachricht ("sub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
#########################################################################
VERFÜGBARE TAGS (Sub-Gift):
#########################################################################
- msg-id > Typ der Nachricht ("subgift")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
- msg-param-recipient-id > UserID des Empfängers
- msg-param-recipient-user-name > Username des Empfängers
- msg-param-recipient-display-name > DisplayName des Empfängers
#########################################################################
VERFÜGBARE TAGS (Resub):
#########################################################################
- msg-id > Typ der Nachricht ("resub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
| def Subscription_Handler( self, data ):
''' USERNOTICE - User hat einen Sub erhalten oder selbst gekauft
#########################################################################
VERFÜGBARE TAGS (Sub):
#########################################################################
- msg-id > Typ der Nachricht ("sub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
#########################################################################
VERFÜGBARE TAGS (Sub-Gift):
#########################################################################
- msg-id > Typ der Nachricht ("subgift")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
- msg-param-recipient-id > UserID des Empfängers
- msg-param-recipient-user-name > Username des Empfängers
- msg-param-recipient-display-name > DisplayName des Empfängers
#########################################################################
VERFÜGBARE TAGS (Resub):
#########################################################################
- msg-id > Typ der Nachricht ("resub")
- user-id > User ID des Users
- login > Benutzername des Users
- display-name > DisplayName des Users
- color > Vom User festgelegte Farbe
- msg-param-cumulative-months > Anzahl der Subs (insgesamt)
- msg-param-streak-months > Anzahl der Monate in Serie
- msg-param-sub-plan > Sub Stufe (Prime, 1000, 2000, 3000)
'''
thisActionName = "Subscription_Handler"
testdata = ""
# RawData auswerten
USERNOTICE = self.reUSERNOTICE.search( data.RawData )
# Basisparameter für Subgifts festlegen
thisIsSubGift = False
thisSubGifterName = ""
thisDictResult = {}
if USERNOTICE:
# Schreibe Daten in Raw-Data-Log, wenn aktiviert
if self.Settings.ActivateRawLog:
self.write_RawDataLog(data=data)
tags = dict( re.findall( r"([^=]+)=([^;]*)(?:;|$)", USERNOTICE.group( "irctags" ) ) )
# Nachricht enthält Informationen zu einer Subscription
if ( ( tags["msg-id"] in self.SubTypeList ) and ( tags["msg-param-sub-plan"] in self.SubPlanList ) ):
# Sub von User
if ( tags["msg-id"] == "sub" or tags["msg-id"] == "resub" ):
thisSubType = tags["msg-id"]
thisSubPlan = tags["msg-param-sub-plan"]
thisSubMonth = tags["msg-param-cumulative-months"]
thisUser = tags["login"]
thisUserName = tags["display-name"]
thisIsSubGift = False
thisSubGifterName = False
self.Logger.WriteLog( " !!! '{0}' WURDE ERKANNT: ( SubPlan = '{1}' - User = '{2}' ( {3}. Monat ) )".format(
thisSubType,
thisSubPlan,
thisUserName,
str(thisSubMonth) )
)
self.write_SubscriberDataLog(
userDisplayName = thisUserName,
subtype = thisSubType,
subplan = thisSubPlan,
submonth = str(thisSubMonth)
)
# Ist ein Gifted Sub
elif ( tags["msg-id"] == "subgift" ):
thisSubType = tags["msg-id"]
thisSubPlan = tags["msg-param-sub-plan"]
thisSubMonth = tags["msg-param-months"]
thisUser = tags["msg-param-recipient-user-name"]
thisUserName = tags["msg-param-recipient-display-name"]
thisIsSubGift = True
thisSubGifterName = tags["login"]
self.Logger.WriteLog( " !!! '{0}' WURDE ERKANNT: ( SubPlan = '{1}' - User = '{2}' ( {3}. Monat ) ) von User = '{4}'".format(
str.upper( thisSubType ),
str(thisSubPlan),
thisUserName,
str(thisSubMonth),
thisSubGifterName )
)
self.write_SubscriberDataLog(
userDisplayName = thisUserName,
subtype = thisSubType,
subplan = thisSubPlan,
submonth = thisSubMonth
)
# Daten des Subgifters in Datei schreiben
self.write_SubGifterDataLog(userDisplayName=self.Parent.GetDisplayName(thisSubGifterName))
# Nur ausführen, wenn die CounterDB bei der Initialisierung übergeben wurde
if self.CounterDB:
counterValue = self.CounterDB.increase_CounterValue( "SubCounter" )
# Counter für Subgoal erhöhen
if int( counterValue ) == int( self.Settings.Raffle_GiveAwaySubGoal ):
self.CounterDB.increase_CounterValue( "GiveAwayCounter" )
self.CounterDB.reset_CounterValue( "SubCounter" )
# Aktuelle Subscriber Daten in File schreiben
self.write_LatestSubscriberFile(
userDisplayName = thisUserName,
submonth = str(thisSubMonth)
)
# Nur ausführen, wenn die CounterDB bei der Initialisierung übergeben wurde
if self.CounterDB:
self.write_CurrentSubgoalFile( subs = self.CounterDB.get_Value( "SubCounter" ) )
self.write_GiveawayCountFile( counter = self.CounterDB.get_Value( "GiveAwayCounter" ) )
# Daten in Dict aufnehmen
thisDictResult = {
"UserName": thisUser,
"UserDisplayName": thisUserName,
"IsSubGift": thisIsSubGift,
"SubGifterName": thisSubGifterName,
"SubPlan": thisSubPlan
}
# Daten an Funktion zurückliefern
return thisDictResult
else:
''' Enthält keine Informationen zu Subscribtions '''
return False
else:
''' Ist nicht vom Typ USERNOTICE '''
return False | [
"def",
"Subscription_Handler",
"(",
"self",
",",
"data",
")",
":",
"thisActionName",
"=",
"\"Subscription_Handler\"",
"testdata",
"=",
"\"\"",
"# RawData auswerten\r",
"USERNOTICE",
"=",
"self",
".",
"reUSERNOTICE",
".",
"search",
"(",
"data",
".",
"RawData",
")",
"# Basisparameter für Subgifts festlegen\r",
"thisIsSubGift",
"=",
"False",
"thisSubGifterName",
"=",
"\"\"",
"thisDictResult",
"=",
"{",
"}",
"if",
"USERNOTICE",
":",
"# Schreibe Daten in Raw-Data-Log, wenn aktiviert\r",
"if",
"self",
".",
"Settings",
".",
"ActivateRawLog",
":",
"self",
".",
"write_RawDataLog",
"(",
"data",
"=",
"data",
")",
"tags",
"=",
"dict",
"(",
"re",
".",
"findall",
"(",
"r\"([^=]+)=([^;]*)(?:;|$)\"",
",",
"USERNOTICE",
".",
"group",
"(",
"\"irctags\"",
")",
")",
")",
"# Nachricht enthält Informationen zu einer Subscription\r",
"if",
"(",
"(",
"tags",
"[",
"\"msg-id\"",
"]",
"in",
"self",
".",
"SubTypeList",
")",
"and",
"(",
"tags",
"[",
"\"msg-param-sub-plan\"",
"]",
"in",
"self",
".",
"SubPlanList",
")",
")",
":",
"# Sub von User\r",
"if",
"(",
"tags",
"[",
"\"msg-id\"",
"]",
"==",
"\"sub\"",
"or",
"tags",
"[",
"\"msg-id\"",
"]",
"==",
"\"resub\"",
")",
":",
"thisSubType",
"=",
"tags",
"[",
"\"msg-id\"",
"]",
"thisSubPlan",
"=",
"tags",
"[",
"\"msg-param-sub-plan\"",
"]",
"thisSubMonth",
"=",
"tags",
"[",
"\"msg-param-cumulative-months\"",
"]",
"thisUser",
"=",
"tags",
"[",
"\"login\"",
"]",
"thisUserName",
"=",
"tags",
"[",
"\"display-name\"",
"]",
"thisIsSubGift",
"=",
"False",
"thisSubGifterName",
"=",
"False",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" !!! '{0}' WURDE ERKANNT: ( SubPlan = '{1}' - User = '{2}' ( {3}. Monat ) )\"",
".",
"format",
"(",
"thisSubType",
",",
"thisSubPlan",
",",
"thisUserName",
",",
"str",
"(",
"thisSubMonth",
")",
")",
")",
"self",
".",
"write_SubscriberDataLog",
"(",
"userDisplayName",
"=",
"thisUserName",
",",
"subtype",
"=",
"thisSubType",
",",
"subplan",
"=",
"thisSubPlan",
",",
"submonth",
"=",
"str",
"(",
"thisSubMonth",
")",
")",
"# Ist ein Gifted Sub\r",
"elif",
"(",
"tags",
"[",
"\"msg-id\"",
"]",
"==",
"\"subgift\"",
")",
":",
"thisSubType",
"=",
"tags",
"[",
"\"msg-id\"",
"]",
"thisSubPlan",
"=",
"tags",
"[",
"\"msg-param-sub-plan\"",
"]",
"thisSubMonth",
"=",
"tags",
"[",
"\"msg-param-months\"",
"]",
"thisUser",
"=",
"tags",
"[",
"\"msg-param-recipient-user-name\"",
"]",
"thisUserName",
"=",
"tags",
"[",
"\"msg-param-recipient-display-name\"",
"]",
"thisIsSubGift",
"=",
"True",
"thisSubGifterName",
"=",
"tags",
"[",
"\"login\"",
"]",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" !!! '{0}' WURDE ERKANNT: ( SubPlan = '{1}' - User = '{2}' ( {3}. Monat ) ) von User = '{4}'\"",
".",
"format",
"(",
"str",
".",
"upper",
"(",
"thisSubType",
")",
",",
"str",
"(",
"thisSubPlan",
")",
",",
"thisUserName",
",",
"str",
"(",
"thisSubMonth",
")",
",",
"thisSubGifterName",
")",
")",
"self",
".",
"write_SubscriberDataLog",
"(",
"userDisplayName",
"=",
"thisUserName",
",",
"subtype",
"=",
"thisSubType",
",",
"subplan",
"=",
"thisSubPlan",
",",
"submonth",
"=",
"thisSubMonth",
")",
"# Daten des Subgifters in Datei schreiben\r",
"self",
".",
"write_SubGifterDataLog",
"(",
"userDisplayName",
"=",
"self",
".",
"Parent",
".",
"GetDisplayName",
"(",
"thisSubGifterName",
")",
")",
"# Nur ausführen, wenn die CounterDB bei der Initialisierung übergeben wurde\r",
"if",
"self",
".",
"CounterDB",
":",
"counterValue",
"=",
"self",
".",
"CounterDB",
".",
"increase_CounterValue",
"(",
"\"SubCounter\"",
")",
"# Counter für Subgoal erhöhen\r",
"if",
"int",
"(",
"counterValue",
")",
"==",
"int",
"(",
"self",
".",
"Settings",
".",
"Raffle_GiveAwaySubGoal",
")",
":",
"self",
".",
"CounterDB",
".",
"increase_CounterValue",
"(",
"\"GiveAwayCounter\"",
")",
"self",
".",
"CounterDB",
".",
"reset_CounterValue",
"(",
"\"SubCounter\"",
")",
"# Aktuelle Subscriber Daten in File schreiben\r",
"self",
".",
"write_LatestSubscriberFile",
"(",
"userDisplayName",
"=",
"thisUserName",
",",
"submonth",
"=",
"str",
"(",
"thisSubMonth",
")",
")",
"# Nur ausführen, wenn die CounterDB bei der Initialisierung übergeben wurde\r",
"if",
"self",
".",
"CounterDB",
":",
"self",
".",
"write_CurrentSubgoalFile",
"(",
"subs",
"=",
"self",
".",
"CounterDB",
".",
"get_Value",
"(",
"\"SubCounter\"",
")",
")",
"self",
".",
"write_GiveawayCountFile",
"(",
"counter",
"=",
"self",
".",
"CounterDB",
".",
"get_Value",
"(",
"\"GiveAwayCounter\"",
")",
")",
"# Daten in Dict aufnehmen\r",
"thisDictResult",
"=",
"{",
"\"UserName\"",
":",
"thisUser",
",",
"\"UserDisplayName\"",
":",
"thisUserName",
",",
"\"IsSubGift\"",
":",
"thisIsSubGift",
",",
"\"SubGifterName\"",
":",
"thisSubGifterName",
",",
"\"SubPlan\"",
":",
"thisSubPlan",
"}",
"# Daten an Funktion zurückliefern\r",
"return",
"thisDictResult",
"else",
":",
"''' Enthält keine Informationen zu Subscribtions '''\r",
"return",
"False",
"else",
":",
"''' Ist nicht vom Typ USERNOTICE '''",
"return",
"False"
] | [
389,
4
] | [
545,
24
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
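
The tag parsing in the handler above rests on a single regex over the IRC tag string; a standalone example of just that step, with a sample tag string invented for illustration:
import re

irctags = "msg-id=resub;login=somefan;display-name=SomeFan;msg-param-cumulative-months=7;msg-param-sub-plan=1000"
tags = dict(re.findall(r"([^=]+)=([^;]*)(?:;|$)", irctags))
print(tags["msg-id"], tags["msg-param-cumulative-months"])   # resub 7
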
|
replace | (values: dict, data: StepData) | | Ersetzt ein Zeichen, Symbol, Wort, einen Satz oder einen ganzen Text in einem String.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Ersetzt ein Zeichen, Symbol, Wort, einen Satz oder einen ganzen Text in einem String. | def replace(values: dict, data: StepData):
"""Ersetzt ein Zeichen, Symbol, Wort, einen Satz oder eine ganzen Text in einem String.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for idx, key in data.loop_key(values["keys"], values):
value = str(data.get_data(key, values))
new_key = get_new_keys(values, idx)
new_value = value.replace(data.format(values["old_value"], values),
data.format(values["new_value"], values),
data.get_data(values.get("count", -1), values, int))
# test mit RegEx
# new_value = re.sub(data.format(values["old_value"], values), data.format(values["new_value"], values), value, count=data.get_data(values.get("count", 0), values, int))
data.insert_data(new_key, new_value, values) | [
"def",
"replace",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"str",
"(",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"new_value",
"=",
"value",
".",
"replace",
"(",
"data",
".",
"format",
"(",
"values",
"[",
"\"old_value\"",
"]",
",",
"values",
")",
",",
"data",
".",
"format",
"(",
"values",
"[",
"\"new_value\"",
"]",
",",
"values",
")",
",",
"data",
".",
"get_data",
"(",
"values",
".",
"get",
"(",
"\"count\"",
",",
"-",
"1",
")",
",",
"values",
",",
"int",
")",
")",
"# test mit RegEx",
"# new_value = re.sub(data.format(values[\"old_value\"], values), data.format(values[\"new_value\"], values), value, count=data.get_data(values.get(\"count\", 0), values, int))",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
191,
0
] | [
206,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
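
Minimal sketch of what this transform step does with its JSON values, using plain dict/str operations instead of the StepData machinery; keys and values are invented for illustration.
# Hypothetical step definition and data, mirroring the replace() transform above.
step = {"keys": ["city"], "old_value": "_", "new_value": " ", "count": -1}
data = {"city": "New_York_City"}

for key in step["keys"]:
    data[key] = str(data[key]).replace(step["old_value"], step["new_value"], step["count"])

print(data["city"])     # New York City
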
|
Execute | (data) | Ausführung der Aktionen | Ausführung der Aktionen | def Execute(data):
'''Ausführung der Aktionen'''
global myParent
global myScriptSettings
global myLogger
global myRawDataHandler
# Reagiere nur wenn:
# - die Erweiterung aktiv ist
# - der Stream Live ist (sofern eingestellt)
# ansonsten Skript hier abbrechen
if (not myScriptSettings.ActivateExtention) or (
(myScriptSettings.OnlyLive) and not myParent.IsLive()):
return
###############################################################################
# alle Daten vom Twitch-Chat (keine Whisper)
###############################################################################
if (data.IsFromTwitch() and (data.IsChatMessage() and not data.IsWhisper())):
if (data.GetParam(0).lower() == str(myScriptSettings.Game_Command).lower()): # and myParent.HasPermission(data.User, myScriptSettings.Game_Command_Permission, ""):
myGameHeist.game_StartHeist(data) | [
"def",
"Execute",
"(",
"data",
")",
":",
"global",
"myParent",
"global",
"myScriptSettings",
"global",
"myLogger",
"global",
"myRawDataHandler",
"# Reagiere nur wenn:\r",
"# - die Erweiterung aktiv ist\r",
"# - der Stream Live ist (sofern eingestellt)\r",
"# ansonsten Skript hier abbrechen\r",
"if",
"(",
"not",
"myScriptSettings",
".",
"ActivateExtention",
")",
"or",
"(",
"(",
"myScriptSettings",
".",
"OnlyLive",
")",
"and",
"not",
"myParent",
".",
"IsLive",
"(",
")",
")",
":",
"return",
"###############################################################################\r",
"# alle Daten vom Twitch-Chat (keine Whisper)\r",
"###############################################################################\r",
"if",
"(",
"data",
".",
"IsFromTwitch",
"(",
")",
"and",
"(",
"data",
".",
"IsChatMessage",
"(",
")",
"and",
"not",
"data",
".",
"IsWhisper",
"(",
")",
")",
")",
":",
"if",
"(",
"data",
".",
"GetParam",
"(",
"0",
")",
".",
"lower",
"(",
")",
"==",
"str",
"(",
"myScriptSettings",
".",
"Game_Command",
")",
".",
"lower",
"(",
")",
")",
":",
"# and myParent.HasPermission(data.User, myScriptSettings.Game_Command_Permission, \"\"):\r",
"myGameHeist",
".",
"game_StartHeist",
"(",
"data",
")"
] | [
346,
0
] | [
369,
45
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
checkField.doMT_VMAT_0_1 | ( self, fileData ) | return self.pdf.finish(), result | PicketFence DMLC Dosimetrie eines 40x100 großen Feldes
Parameters
----------
fileData : pandas.DataFrame
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| PicketFence DMLC Dosimetrie eines 40x100 großen Feldes | def doMT_VMAT_0_1( self, fileData ):
"""PicketFence DMLC Dosimetrie eines 40x100 großen Feldes
Parameters
----------
fileData : pandas.DataFrame
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
result=[]
# wird für progress verwendet
filesMax=len( fileData )
self.fileCount = 0
# metadata ergänzen und lokal als md bereitstellen
md = dict_merge( DotMap( {
"series_sort_values": ["MLCPlanType", "gantry"],
"series_groupby": ["day", "SeriesNumber"],
"current": {
"field_count": self.metadata.current.get("fields", 0) - 1, # 4
},
"querys" : {
"base" : 'MLCPlanType!="DynMLCPlan"', # "check_subtag == 'base'",
"fields" : 'MLCPlanType=="DynMLCPlan"', # "check_subtag != 'base'",
},
"manual": {
"filename": self.metadata.info["anleitung"],
"attrs": {"class":"layout-fill-width", "margin-bottom": "5mm"},
},
"doseArea" : { "X1":-0.75, "X2": 0.75, "Y1": -4, "Y2": 4 },
"_imgSize" : {"width" : 36, "height" : 70},
"_imgField": {"border": 10 },
"_chart": { "width" : 180, "height" : 60},
"table_fields" : [
{'field': 'Kennung', 'label':'Kennung', 'format':'{0}', 'style': [('text-align', 'left')] },
{'field': 'gantry', 'label':'Gantry', 'format':'{0:1.1f}' },
# {'field': 'Mof', 'label':'M<sub>OF</sub>', 'format':'{0:.5f}' },
{'field': 'Mcorr', 'label':'M<sub>corr</sub>', 'format':'{0:.4f}' },
{'field': 'Mdev', 'label':'M<sub>dev</sub> [%]', 'format':'{0:.2f}' },
{'field': 'Mdev_passed', 'label':'Passed' },
]
} ), self.metadata )
def groupBySeries( df_group ):
"""Datumsweise Auswertung und PDF Ausgabe.
"""
# get base and fields check number of data
ok, df_base, df_fields = self.evaluationPrepare(df_group, md, result)
if not ok:
return
# base Field und dosis bereitstellen
baseField = qa_field( self.getFullData( df_base.loc[df_base.index[0]] ) )
Mof = baseField.image.getRoi( md["doseArea"] ).copy()
data = [{
'Kennung': baseField.infos["Kennung"],
'gantry': baseField.infos["gantry"],
'Mcorr': np.nan,
'Mdev': np.nan,
'Passed' : np.nan
}]
img = baseField.image.plotImage( original=False
, field = md["_imgField"]
, metadata = md
, plotTitle = "{Kennung}"
, invert=False, plotCax=False, plotField=True )
# Bild anzeigen
self.pdf.image( img, md["_imgSize"] )
# alle felder durchgehen
for info in df_fields.itertuples():
field = qa_field( self.getFullData( info ) )
#Mdmlc = field.getMeanDose( md["doseArea"] )
Mdmlc = field.image.getRoi( md["doseArea"] ).copy()
Mcorr = (Mdmlc / Mof).mean()
data.append( {
'Kennung': field.infos["Kennung"],
'gantry': field.infos["gantry"],
'Mcorr': Mcorr,
'Mdev': np.nan,
'Pass' : np.nan
} )
img = field.image.plotImage( original=False
, field = md["_imgField"]
, metadata = md
, plotTitle = "{Kennung}"
, invert=False, plotCax=False, plotField=True )
# Bild anzeigen
self.pdf.image( img, md["_imgSize"] )
# progress pro file stimmt nicht immer genau (baseimage)
# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf
self.fileCount += 1
if hasattr( logger, "progress"):
logger.progress( md["testId"], 40 + ( 40 / filesMax * self.fileCount ) )
df = pd.DataFrame( data )
McorrMean = df['Mcorr'].mean( )
df[ 'Mdev' ] = (df[ 'Mcorr' ] - McorrMean ) / McorrMean * 100
#
# Abweichung ausrechnen und Passed setzen
#
check = [
{ "field": 'Mdev', 'tolerance':'default' }
]
acceptance = self.check_acceptance( df, md, check )
#
# Ergebnis in result merken
#
result.append( self.createResult( df, md, check,
df_group['AcquisitionDateTime'].iloc[0].strftime("%Y%m%d"),
len( result ), # bisherige Ergebnisse in result
acceptance
) )
# Formel
self.pdf.mathtext( r"Berechnung des Flatness-korrigierten Bildes: $M_{corr,i}(x,y) = \frac{M_{DMLC,i}(x,y)}{M_{OF}(x,y)}$", attrs={ "margin-top": "5mm" } )
self.pdf.mathtext( r"Dosimetrische Abweichung aus den ROI-Mittelwerten: $M_{dev,i} = \frac{\overline{M_{corr,i}}-\overline{M_{corr}}}{\overline{M_{corr}}}$", attrs={ "margin-top": "5mm" } )
#
# Tabelle erzeugen
#
self.pdf.pandas( df,
attrs={"class":"layout-fill-width", "margin-top": "5mm"},
fields=md["table_fields"]
)
text_values = {
"f_warning": md.current.tolerance.default.warning.get("f",""),
"f_error": md.current.tolerance.default.error.get("f","")
}
text = """<br>
Warnung bei: <b style="position:absolute;left:45mm;">{f_warning}</b><br>
Fehler bei: <b style="position:absolute;left:45mm;">{f_error}</b>
""".format( **text_values ).replace("{value}", "M<sub>dev</sub>")
self.pdf.text( text )
# Gesamt check
self.pdf.resultIcon( acceptance )
#
# Gruppiert nach SeriesNumber abarbeiten
#
fileData.sort_values(["MLCPlanType", "gantry"], na_position='first').groupby( [ 'day', 'SeriesNumber' ] ).apply( groupBySeries )
# abschließen pdfdaten und result zurückgeben
return self.pdf.finish(), result | [
"def",
"doMT_VMAT_0_1",
"(",
"self",
",",
"fileData",
")",
":",
"result",
"=",
"[",
"]",
"# wird für progress verwendet",
"filesMax",
"=",
"len",
"(",
"fileData",
")",
"self",
".",
"fileCount",
"=",
"0",
"# metadata ergänzen und lokal als md bereitstellen",
"md",
"=",
"dict_merge",
"(",
"DotMap",
"(",
"{",
"\"series_sort_values\"",
":",
"[",
"\"MLCPlanType\"",
",",
"\"gantry\"",
"]",
",",
"\"series_groupby\"",
":",
"[",
"\"day\"",
",",
"\"SeriesNumber\"",
"]",
",",
"\"current\"",
":",
"{",
"\"field_count\"",
":",
"self",
".",
"metadata",
".",
"current",
".",
"get",
"(",
"\"fields\"",
",",
"0",
")",
"-",
"1",
",",
"# 4",
"}",
",",
"\"querys\"",
":",
"{",
"\"base\"",
":",
"'MLCPlanType!=\"DynMLCPlan\"'",
",",
"# \"check_subtag == 'base'\",",
"\"fields\"",
":",
"'MLCPlanType==\"DynMLCPlan\"'",
",",
"# \"check_subtag != 'base'\",",
"}",
",",
"\"manual\"",
":",
"{",
"\"filename\"",
":",
"self",
".",
"metadata",
".",
"info",
"[",
"\"anleitung\"",
"]",
",",
"\"attrs\"",
":",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
",",
"\"margin-bottom\"",
":",
"\"5mm\"",
"}",
",",
"}",
",",
"\"doseArea\"",
":",
"{",
"\"X1\"",
":",
"-",
"0.75",
",",
"\"X2\"",
":",
"0.75",
",",
"\"Y1\"",
":",
"-",
"4",
",",
"\"Y2\"",
":",
"4",
"}",
",",
"\"_imgSize\"",
":",
"{",
"\"width\"",
":",
"36",
",",
"\"height\"",
":",
"70",
"}",
",",
"\"_imgField\"",
":",
"{",
"\"border\"",
":",
"10",
"}",
",",
"\"_chart\"",
":",
"{",
"\"width\"",
":",
"180",
",",
"\"height\"",
":",
"60",
"}",
",",
"\"table_fields\"",
":",
"[",
"{",
"'field'",
":",
"'Kennung'",
",",
"'label'",
":",
"'Kennung'",
",",
"'format'",
":",
"'{0}'",
",",
"'style'",
":",
"[",
"(",
"'text-align'",
",",
"'left'",
")",
"]",
"}",
",",
"{",
"'field'",
":",
"'gantry'",
",",
"'label'",
":",
"'Gantry'",
",",
"'format'",
":",
"'{0:1.1f}'",
"}",
",",
"# {'field': 'Mof', 'label':'M<sub>OF</sub>', 'format':'{0:.5f}' },",
"{",
"'field'",
":",
"'Mcorr'",
",",
"'label'",
":",
"'M<sub>corr</sub>'",
",",
"'format'",
":",
"'{0:.4f}'",
"}",
",",
"{",
"'field'",
":",
"'Mdev'",
",",
"'label'",
":",
"'M<sub>dev</sub> [%]'",
",",
"'format'",
":",
"'{0:.2f}'",
"}",
",",
"{",
"'field'",
":",
"'Mdev_passed'",
",",
"'label'",
":",
"'Passed'",
"}",
",",
"]",
"}",
")",
",",
"self",
".",
"metadata",
")",
"def",
"groupBySeries",
"(",
"df_group",
")",
":",
"\"\"\"Datumsweise Auswertung und PDF Ausgabe.\n\n \"\"\"",
"# get base and fields check number of data",
"ok",
",",
"df_base",
",",
"df_fields",
"=",
"self",
".",
"evaluationPrepare",
"(",
"df_group",
",",
"md",
",",
"result",
")",
"if",
"not",
"ok",
":",
"return",
"# base Field und dosis bereitstellen",
"baseField",
"=",
"qa_field",
"(",
"self",
".",
"getFullData",
"(",
"df_base",
".",
"loc",
"[",
"df_base",
".",
"index",
"[",
"0",
"]",
"]",
")",
")",
"Mof",
"=",
"baseField",
".",
"image",
".",
"getRoi",
"(",
"md",
"[",
"\"doseArea\"",
"]",
")",
".",
"copy",
"(",
")",
"data",
"=",
"[",
"{",
"'Kennung'",
":",
"baseField",
".",
"infos",
"[",
"\"Kennung\"",
"]",
",",
"'gantry'",
":",
"baseField",
".",
"infos",
"[",
"\"gantry\"",
"]",
",",
"'Mcorr'",
":",
"np",
".",
"nan",
",",
"'Mdev'",
":",
"np",
".",
"nan",
",",
"'Passed'",
":",
"np",
".",
"nan",
"}",
"]",
"img",
"=",
"baseField",
".",
"image",
".",
"plotImage",
"(",
"original",
"=",
"False",
",",
"field",
"=",
"md",
"[",
"\"_imgField\"",
"]",
",",
"metadata",
"=",
"md",
",",
"plotTitle",
"=",
"\"{Kennung}\"",
",",
"invert",
"=",
"False",
",",
"plotCax",
"=",
"False",
",",
"plotField",
"=",
"True",
")",
"# Bild anzeigen",
"self",
".",
"pdf",
".",
"image",
"(",
"img",
",",
"md",
"[",
"\"_imgSize\"",
"]",
")",
"# alle felder durchgehen",
"for",
"info",
"in",
"df_fields",
".",
"itertuples",
"(",
")",
":",
"field",
"=",
"qa_field",
"(",
"self",
".",
"getFullData",
"(",
"info",
")",
")",
"#Mdmlc = field.getMeanDose( md[\"doseArea\"] )",
"Mdmlc",
"=",
"field",
".",
"image",
".",
"getRoi",
"(",
"md",
"[",
"\"doseArea\"",
"]",
")",
".",
"copy",
"(",
")",
"Mcorr",
"=",
"(",
"Mdmlc",
"/",
"Mof",
")",
".",
"mean",
"(",
")",
"data",
".",
"append",
"(",
"{",
"'Kennung'",
":",
"field",
".",
"infos",
"[",
"\"Kennung\"",
"]",
",",
"'gantry'",
":",
"field",
".",
"infos",
"[",
"\"gantry\"",
"]",
",",
"'Mcorr'",
":",
"Mcorr",
",",
"'Mdev'",
":",
"np",
".",
"nan",
",",
"'Pass'",
":",
"np",
".",
"nan",
"}",
")",
"img",
"=",
"field",
".",
"image",
".",
"plotImage",
"(",
"original",
"=",
"False",
",",
"field",
"=",
"md",
"[",
"\"_imgField\"",
"]",
",",
"metadata",
"=",
"md",
",",
"plotTitle",
"=",
"\"{Kennung}\"",
",",
"invert",
"=",
"False",
",",
"plotCax",
"=",
"False",
",",
"plotField",
"=",
"True",
")",
"# Bild anzeigen",
"self",
".",
"pdf",
".",
"image",
"(",
"img",
",",
"md",
"[",
"\"_imgSize\"",
"]",
")",
"# progress pro file stimmt nicht immer genau (baseimage)",
"# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf",
"self",
".",
"fileCount",
"+=",
"1",
"if",
"hasattr",
"(",
"logger",
",",
"\"progress\"",
")",
":",
"logger",
".",
"progress",
"(",
"md",
"[",
"\"testId\"",
"]",
",",
"40",
"+",
"(",
"40",
"/",
"filesMax",
"*",
"self",
".",
"fileCount",
")",
")",
"df",
"=",
"pd",
".",
"DataFrame",
"(",
"data",
")",
"McorrMean",
"=",
"df",
"[",
"'Mcorr'",
"]",
".",
"mean",
"(",
")",
"df",
"[",
"'Mdev'",
"]",
"=",
"(",
"df",
"[",
"'Mcorr'",
"]",
"-",
"McorrMean",
")",
"/",
"McorrMean",
"*",
"100",
"#",
"# Abweichung ausrechnen und Passed setzen",
"#",
"check",
"=",
"[",
"{",
"\"field\"",
":",
"'Mdev'",
",",
"'tolerance'",
":",
"'default'",
"}",
"]",
"acceptance",
"=",
"self",
".",
"check_acceptance",
"(",
"df",
",",
"md",
",",
"check",
")",
"#",
"# Ergebnis in result merken",
"#",
"result",
".",
"append",
"(",
"self",
".",
"createResult",
"(",
"df",
",",
"md",
",",
"check",
",",
"df_group",
"[",
"'AcquisitionDateTime'",
"]",
".",
"iloc",
"[",
"0",
"]",
".",
"strftime",
"(",
"\"%Y%m%d\"",
")",
",",
"len",
"(",
"result",
")",
",",
"# bisherige Ergebnisse in result",
"acceptance",
")",
")",
"# Formel",
"self",
".",
"pdf",
".",
"mathtext",
"(",
"r\"Berechnung des Flatness-korrigierten Bildes: $M_{corr,i}(x,y) = \\frac{M_{DMLC,i}(x,y)}{M_{OF}(x,y)}$\"",
",",
"attrs",
"=",
"{",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
")",
"self",
".",
"pdf",
".",
"mathtext",
"(",
"r\"Dosimetrische Abweichung aus den ROI-Mittelwerten: $M_{dev,i} = \\frac{\\overline{M_{corr,i}}-\\overline{M_{corr}}}{\\overline{M_{corr}}}$\"",
",",
"attrs",
"=",
"{",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
")",
"#",
"# Tabelle erzeugen",
"#",
"self",
".",
"pdf",
".",
"pandas",
"(",
"df",
",",
"attrs",
"=",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
",",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
",",
"fields",
"=",
"md",
"[",
"\"table_fields\"",
"]",
")",
"text_values",
"=",
"{",
"\"f_warning\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"default",
".",
"warning",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
",",
"\"f_error\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"default",
".",
"error",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
"}",
"text",
"=",
"\"\"\"<br>\n Warnung bei: <b style=\"position:absolute;left:45mm;\">{f_warning}</b><br>\n Fehler bei: <b style=\"position:absolute;left:45mm;\">{f_error}</b>\n \"\"\"",
".",
"format",
"(",
"*",
"*",
"text_values",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"M<sub>dev</sub>\"",
")",
"self",
".",
"pdf",
".",
"text",
"(",
"text",
")",
"# Gesamt check",
"self",
".",
"pdf",
".",
"resultIcon",
"(",
"acceptance",
")",
"#",
"# Gruppiert nach SeriesNumber abarbeiten",
"#",
"fileData",
".",
"sort_values",
"(",
"[",
"\"MLCPlanType\"",
",",
"\"gantry\"",
"]",
",",
"na_position",
"=",
"'first'",
")",
".",
"groupby",
"(",
"[",
"'day'",
",",
"'SeriesNumber'",
"]",
")",
".",
"apply",
"(",
"groupBySeries",
")",
"# abschließen pdfdaten und result zurückgeben",
"return",
"self",
".",
"pdf",
".",
"finish",
"(",
")",
",",
"result"
] | [
1305,
4
] | [
1472,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
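Zur Verdeutlichung der oben verwendeten Dosimetrie-Formel eine kleine Rechen-Skizze mit frei erfundenen Mcorr-Werten; sie zeigt nur die prozentuale Abweichung Mdev relativ zum Mittelwert, nicht die ROI-Bildung aus den DICOM-Daten.

```python
import pandas as pd

# Frei erfundene ROI-Mittelwerte Mcorr für vier DMLC-Felder
df = pd.DataFrame({"Mcorr": [1.002, 0.998, 1.001, 0.999]})

mcorr_mean = df["Mcorr"].mean()                              # Referenz: Mittel über alle Felder
df["Mdev"] = (df["Mcorr"] - mcorr_mean) / mcorr_mean * 100   # Abweichung in Prozent
print(df.round(3))  # Mdev: 0.2, -0.2, 0.1, -0.1
```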
transform_dict | (values: dict, data: StepData) | Führt alle angegebenen `"transform"`-Funktionen für alle Werte eines Dictionaries aus.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Führt alle angegebenen `"transform"`-Funktionen für alle Werte eines Dictionaries aus. | def transform_dict(values: dict, data: StepData):
"""Führt alle angegebenen `"transform"`-Funktionen für alle Werte eines Dictionaries aus.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for _ in data.loop_dict(data.get_data(values["dict_key"], values), values):
transform(values, data) | [
"def",
"transform_dict",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"_",
"in",
"data",
".",
"loop_dict",
"(",
"data",
".",
"get_data",
"(",
"values",
"[",
"\"dict_key\"",
"]",
",",
"values",
")",
",",
"values",
")",
":",
"transform",
"(",
"values",
",",
"data",
")"
] | [
63,
0
] | [
70,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
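Konzeptionelle Mini-Skizze (frei erfundene Daten) zur obigen transform_dict-Idee: dieselbe Transformation wird auf jeden Wert eines Dictionaries angewendet. Die eigentliche Delegation an transform()/loop_dict des Frameworks ist hier nicht nachgebildet.

```python
# Frei erfundenes Beispiel: eine Transformation (hier: strip) auf alle Dict-Werte anwenden
data = {"a": " 1 ", "b": " 2 "}
transformed = {key: value.strip() for key, value in data.items()}
print(transformed)  # -> {'a': '1', 'b': '2'}
```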
ariaDicomClass.__init__ | ( self, database=None, server="VMSDBD", config=None ) | Klasse sowie ariaClass und dicomClass initialisieren
| Klasse sowie ariaClass und dicomClass initialisieren | def __init__( self, database=None, server="VMSDBD", config=None ):
"""Klasse sowie ariaClass und dicomClass initialisieren
"""
# Klassen defaults setzen und übergaben
self.config = config
self.variables = self.config.variables
self.infoFields = infoFields
self.dicomfiles: dict = {}
self.pd_results = None
self.resultfile = None
self.lastSQL = ""
# ariaClass initialisieren
ariaClass.__init__( self, database )
# dicomClass initialisieren. Der Erfolg kann über dicomClass.initialized abgefragt werden
ispDicom.__init__( self, server, self.config )
# Datei mit Ergebnissen als pandas laden
self.resultfile = osp.join( self.config.get("resultsPath", ".."), self.config.get("database.gqa.name", "gqa.json") )
self.pd_results = ispResults( self.config, self.resultfile ) | [
"def",
"__init__",
"(",
"self",
",",
"database",
"=",
"None",
",",
"server",
"=",
"\"VMSDBD\"",
",",
"config",
"=",
"None",
")",
":",
"# Klassen defaults setzen und übergaben",
"self",
".",
"config",
"=",
"config",
"self",
".",
"variables",
"=",
"self",
".",
"config",
".",
"variables",
"self",
".",
"infoFields",
"=",
"infoFields",
"self",
".",
"dicomfiles",
":",
"dict",
"=",
"{",
"}",
"self",
".",
"pd_results",
"=",
"None",
"self",
".",
"resultfile",
"=",
"None",
"self",
".",
"lastSQL",
"=",
"\"\"",
"# ariaClass initialisieren",
"ariaClass",
".",
"__init__",
"(",
"self",
",",
"database",
")",
"# dicomClass initialisieren. Der Erfolg kann über dicomClass.initialized abgefragt werden",
"ispDicom",
".",
"__init__",
"(",
"self",
",",
"server",
",",
"self",
".",
"config",
")",
"# Datei mit Ergebnissen als pandas laden",
"self",
".",
"resultfile",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"config",
".",
"get",
"(",
"\"resultsPath\"",
",",
"\"..\"",
")",
",",
"self",
".",
"config",
".",
"get",
"(",
"\"database.gqa.name\"",
",",
"\"gqa.json\"",
")",
")",
"self",
".",
"pd_results",
"=",
"ispResults",
"(",
"self",
".",
"config",
",",
"self",
".",
"resultfile",
")"
] | [
57,
4
] | [
81,
68
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|

HeistSystem.WriteMessage_StakeOverMaximum | (self, data) | return | Schreibt eine Benachrichtigung in den Chat, dass der Einsatz des Spielers
das Maximum überschritten hat und somit reduziert wird.
| Schreibt eine Benachrichtigung in den Chat, dass der Einsatz des Spielers
das Maximum überschritten hat und somit reduziert wird.
| def WriteMessage_StakeOverMaximum(self, data):
''' Schreibt eine Benachrichtigung in den Chat, dass der Einsatz des Spielers
das Maximum überschritten hat und somit reduziert wird.
'''
thisActionName = "WriteMessage_StakeOverMaximum"
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_StakeOverMaximum
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
user=data.UserName,
target=self.GameTargetName,
command=self.Settings.Game_Command,
maxpoints=TransformLocale_Decimals(
int(self.Settings.Game_Settings_MaxStake)
),
pointsname=self.Parent.GetCurrencyName()
)
)
return | [
"def",
"WriteMessage_StakeOverMaximum",
"(",
"self",
",",
"data",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_StakeOverMaximum\"",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_StakeOverMaximum",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"user",
"=",
"data",
".",
"UserName",
",",
"target",
"=",
"self",
".",
"GameTargetName",
",",
"command",
"=",
"self",
".",
"Settings",
".",
"Game_Command",
",",
"maxpoints",
"=",
"TransformLocale_Decimals",
"(",
"int",
"(",
"self",
".",
"Settings",
".",
"Game_Settings_MaxStake",
")",
")",
",",
"pointsname",
"=",
"self",
".",
"Parent",
".",
"GetCurrencyName",
"(",
")",
")",
")",
"return"
] | [
1278,
4
] | [
1302,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
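Kleine, hypothetische Skizze der oben verwendeten Platzhalter-Ersetzung per str.format; Vorlagentext und Werte sind erfunden und stammen nicht aus der Nachrichten-Datenbank des Skripts.

```python
# Erfundene Nachrichtenvorlage mit denselben Platzhalternamen wie oben
message_template = "{user}, dein Einsatz übersteigt das Maximum von {maxpoints} {pointsname}!"
print(message_template.format(user="Viewer42", maxpoints="1.000", pointsname="Punkte"))
```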
HeistSystem.Writemessage_GameStartIntroMessage | (self) | return | Schreibt die Intro-Nachricht in den Chat | Schreibt die Intro-Nachricht in den Chat | def Writemessage_GameStartIntroMessage(self):
''' Schreibt die Intro-Nachricht in den Chat '''
thisActionName = "Writemessage_GameStartIntroMessage"
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_GameStartIntro
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
target=self.GameTargetName
)
)
return | [
"def",
"Writemessage_GameStartIntroMessage",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"Writemessage_GameStartIntroMessage\"",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_GameStartIntro",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"target",
"=",
"self",
".",
"GameTargetName",
")",
")",
"return"
] | [
1161,
4
] | [
1177,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
checkMlc._doLamellenpositioniergenauigkeit | (self, fileData, md ) | return self.pdf.finish(), result | Erstellen der Lamellenpositioniergenauigkeit für die Tests MT_8_02_1_2 und MT_8_02_3
Parameters
----------
fileData : Pandas
md : dotmap
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
| Erstellen der Lamellenpositioniergenauigkeit für die Tests MT_8_02_1_2 und MT_8_02_3 | def _doLamellenpositioniergenauigkeit(self, fileData, md ):
"""Erstellen der Lamellenpositioniergenauigkeit für die Tests MT_8_02_1_2 und MT_8_02_3
Parameters
----------
fileData : Pandas
md : dotmap
Returns
-------
pdfFilename : str
Name der erzeugten Pdfdatei
result : list
list mit dicts der Testergebnisse
See Also
--------
isp.results : Aufbau von result
"""
# used on progress
filesMax=len( fileData )
self.fileCount = 0
# holds evaluation results
result=[]
# Auszuwertende Leaf Nummern
leaf_from = md.options.leafs.get("from", 1)
leaf_to = md.options.leafs.get("to", 60)
leafs = np.arange( leaf_from, leaf_to + 1, 1 )
def evaluate( df_group ):
"""Evaluate grouped Fields.
create PDF output and fills result
Parameters
----------
df_group : pandas Dataframe
"""
# das Datum vom ersten Datensatz verwenden
checkDate = df_group['AcquisitionDateTime'].iloc[0].strftime("%d.%m.%Y")
self.pdf.setContentName( checkDate )
#
# Anleitung
#
self.pdf.textFile( **md.manual )
# array für die chartdaten bereitstellen
fwxm_plot = {}
shift_plot = {}
# für jeden Datensatz
leafData = []
i = 0
for info in df_group.itertuples():
i += 1
# mlc Prüfung aktivieren
check = qa_mlc( self.getFullData( info._asdict() ) )
# leafPair Transmissionen suchen und merken
data = check.FWHM_findLeafs( leafs=leafs, lfd=i, variante=md.current["testTag"] )
# Daten für die Tabelle merken
leafData.append( data )
#Daten für den Boxplot merken
fwxm_plot[i] = data["fwxm.data"]
shift_plot[i] = data["shift.data"]
# LeafError Chart erzeugen
img = check.FWHM_plot_error( data, md["_leafPlot"], plotTitle=md["plotTitle"], leaf_from=leaf_from )
self.pdf.image(img, md["_leafPlot"] )
# progress pro file stimmt nicht immer genau (baseimage)
# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf
self.fileCount += 1
if hasattr( logger, "progress"):
logger.progress( md["testId"], 40 + ( 40 / filesMax * self.fileCount ) )
#
# chart leafError als BoxPlot
#
img = check.FWHM_plot_errorBox( {"fwxm.data": fwxm_plot, "shift.data":shift_plot }, md["_boxPlot"] )
self.pdf.image(img, md["_boxPlot"] )
if len(leafData) < 1:
result.append( self.pdf_error_result(
md, date=checkDate, group_len=len( result ),
msg='<b>Datenfehler</b>: keine Felder gefunden.'
) )
return
# dataFrame erzeugen und passed spalten einfügen
df = pd.DataFrame( leafData )
#
# Abweichung ausrechnen und Passed setzen
#
check = [
{ "field": 'fwxm.min', 'tolerance':'FWXMMean' },
{ "field": 'fwxm.mean', 'tolerance':'FWXMMean' },
{ "field": 'fwxm.max', 'tolerance':'FWXMMean' },
{ "field": 'shift.min', 'tolerance':'ShiftMean' },
{ "field": 'shift.mean', 'tolerance':'ShiftMean' },
{ "field": 'shift.max', 'tolerance':'ShiftMean' }
]
acceptance = self.check_acceptance( df, md, check )
# im Dataframe die Abweichungen nur in einer Spalte für FWXM und Shift anzeigen
#print( df )
df["fwxm.passed"] = df["fwxm.min_passed"] + df["fwxm.mean_passed"] + df["fwxm.max_passed"]
df["shift.passed"] = df["shift.min_passed"] + df["shift.mean_passed"] + df["shift.max_passed"]
#
# Ergebnis in result merken
#
result.append( self.createResult( df, md, check,
df_group['AcquisitionDateTime'].iloc[0].strftime("%Y%m%d"),
len( result ), # bisherige Ergebnisse in result
acceptance
) )
#
# Tabelle erzeugen
#
self.pdf.pandas( df,
attrs={"class":"layout-fill-width", "margin-top": "5mm"},
fields=md["table_fields"]
)
# Formeln anzeigen
text_values = {
"fwxm warning" : md.current.tolerance.FWXMMean.warning.get("f","").replace("{value}", "FWXM mean"),
"fwxm_error": md.current.tolerance.FWXMMean.error.get("f","").replace("{value}", "FWXM mean"),
"shift_warning" : md.current.tolerance.ShiftMean.warning.get("f","").replace("{value}", "Shift mean"),
"shift_error": md.current.tolerance.ShiftMean.error.get("f","").replace("{value}", "Shift mean")
}
text = """
Warnung bei: <b style="position:absolute;left:25mm;">{fwxm warning}</b>
<b style="position:absolute;left:90mm;">{shift_warning}</b><br>
Fehler bei: <b style="position:absolute;left:25mm;">{fwxm_error}</b>
<b style="position:absolute;left:90mm;">{shift_error}</b>
""".format( **text_values )
self.pdf.text( text, replaceNewLine=False, attrs={ "margin-top": "5mm"} )
# Gesamt check - das schlechteste aus beiden tabelle Spalten
#minBoth = df[ [ "fwxm.acceptance", "shift.acceptance" ] ].min(axis=None)
#self.pdf.resultIcon( minBoth.min() )
self.pdf.resultIcon( acceptance )
#
# Gruppiert nach SeriesNumber abarbeiten
#
fileData.sort_values(md["series_sort_values"]).groupby( md["series_groupby"] ).apply( evaluate )
# abschließen pdfdaten und result zurückgeben
return self.pdf.finish(), result | [
"def",
"_doLamellenpositioniergenauigkeit",
"(",
"self",
",",
"fileData",
",",
"md",
")",
":",
"# used on progress",
"filesMax",
"=",
"len",
"(",
"fileData",
")",
"self",
".",
"fileCount",
"=",
"0",
"# holds evaluation results",
"result",
"=",
"[",
"]",
"# Auszuwertende Leaf Nummern",
"leaf_from",
"=",
"md",
".",
"options",
".",
"leafs",
".",
"get",
"(",
"\"from\"",
",",
"1",
")",
"leaf_to",
"=",
"md",
".",
"options",
".",
"leafs",
".",
"get",
"(",
"\"to\"",
",",
"60",
")",
"leafs",
"=",
"np",
".",
"arange",
"(",
"leaf_from",
",",
"leaf_to",
"+",
"1",
",",
"1",
")",
"def",
"evaluate",
"(",
"df_group",
")",
":",
"\"\"\"Evaluate grouped Fields.\n\n create PDF output and fills result\n\n Parameters\n ----------\n df_group : pandas Dataframe\n\n \"\"\"",
"# das Datum vom ersten Datensatz verwenden",
"checkDate",
"=",
"df_group",
"[",
"'AcquisitionDateTime'",
"]",
".",
"iloc",
"[",
"0",
"]",
".",
"strftime",
"(",
"\"%d.%m.%Y\"",
")",
"self",
".",
"pdf",
".",
"setContentName",
"(",
"checkDate",
")",
"#",
"# Anleitung",
"#",
"self",
".",
"pdf",
".",
"textFile",
"(",
"*",
"*",
"md",
".",
"manual",
")",
"# array für die chartdaten bereitstellen",
"fwxm_plot",
"=",
"{",
"}",
"shift_plot",
"=",
"{",
"}",
"# für jeden Datensatz",
"leafData",
"=",
"[",
"]",
"i",
"=",
"0",
"for",
"info",
"in",
"df_group",
".",
"itertuples",
"(",
")",
":",
"i",
"+=",
"1",
"# mlc Prüfung aktivieren",
"check",
"=",
"qa_mlc",
"(",
"self",
".",
"getFullData",
"(",
"info",
".",
"_asdict",
"(",
")",
")",
")",
"# leafPair Transmissionen suchen und merken",
"data",
"=",
"check",
".",
"FWHM_findLeafs",
"(",
"leafs",
"=",
"leafs",
",",
"lfd",
"=",
"i",
",",
"variante",
"=",
"md",
".",
"current",
"[",
"\"testTag\"",
"]",
")",
"# Daten für die Tabelle merken",
"leafData",
".",
"append",
"(",
"data",
")",
"#Daten für den Boxplot merken",
"fwxm_plot",
"[",
"i",
"]",
"=",
"data",
"[",
"\"fwxm.data\"",
"]",
"shift_plot",
"[",
"i",
"]",
"=",
"data",
"[",
"\"shift.data\"",
"]",
"# LeafError Chart erzeugen",
"img",
"=",
"check",
".",
"FWHM_plot_error",
"(",
"data",
",",
"md",
"[",
"\"_leafPlot\"",
"]",
",",
"plotTitle",
"=",
"md",
"[",
"\"plotTitle\"",
"]",
",",
"leaf_from",
"=",
"leaf_from",
")",
"self",
".",
"pdf",
".",
"image",
"(",
"img",
",",
"md",
"[",
"\"_leafPlot\"",
"]",
")",
"# progress pro file stimmt nicht immer genau (baseimage)",
"# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf",
"self",
".",
"fileCount",
"+=",
"1",
"if",
"hasattr",
"(",
"logger",
",",
"\"progress\"",
")",
":",
"logger",
".",
"progress",
"(",
"md",
"[",
"\"testId\"",
"]",
",",
"40",
"+",
"(",
"40",
"/",
"filesMax",
"*",
"self",
".",
"fileCount",
")",
")",
"#",
"# chart leafError als BoxPlot",
"#",
"img",
"=",
"check",
".",
"FWHM_plot_errorBox",
"(",
"{",
"\"fwxm.data\"",
":",
"fwxm_plot",
",",
"\"shift.data\"",
":",
"shift_plot",
"}",
",",
"md",
"[",
"\"_boxPlot\"",
"]",
")",
"self",
".",
"pdf",
".",
"image",
"(",
"img",
",",
"md",
"[",
"\"_boxPlot\"",
"]",
")",
"if",
"len",
"(",
"leafData",
")",
"<",
"1",
":",
"result",
".",
"append",
"(",
"self",
".",
"pdf_error_result",
"(",
"md",
",",
"date",
"=",
"checkDate",
",",
"group_len",
"=",
"len",
"(",
"result",
")",
",",
"msg",
"=",
"'<b>Datenfehler</b>: keine Felder gefunden.'",
")",
")",
"return",
"# dataFrame erzeugen und passed spalten einfügen",
"df",
"=",
"pd",
".",
"DataFrame",
"(",
"leafData",
")",
"#",
"# Abweichung ausrechnen und Passed setzen",
"#",
"check",
"=",
"[",
"{",
"\"field\"",
":",
"'fwxm.min'",
",",
"'tolerance'",
":",
"'FWXMMean'",
"}",
",",
"{",
"\"field\"",
":",
"'fwxm.mean'",
",",
"'tolerance'",
":",
"'FWXMMean'",
"}",
",",
"{",
"\"field\"",
":",
"'fwxm.max'",
",",
"'tolerance'",
":",
"'FWXMMean'",
"}",
",",
"{",
"\"field\"",
":",
"'shift.min'",
",",
"'tolerance'",
":",
"'ShiftMean'",
"}",
",",
"{",
"\"field\"",
":",
"'shift.mean'",
",",
"'tolerance'",
":",
"'ShiftMean'",
"}",
",",
"{",
"\"field\"",
":",
"'shift.max'",
",",
"'tolerance'",
":",
"'ShiftMean'",
"}",
"]",
"acceptance",
"=",
"self",
".",
"check_acceptance",
"(",
"df",
",",
"md",
",",
"check",
")",
"# im Dataframe die Abweichungen nur in einer Spalte für FWXM und Shift anzeigen",
"#print( df )",
"df",
"[",
"\"fwxm.passed\"",
"]",
"=",
"df",
"[",
"\"fwxm.min_passed\"",
"]",
"+",
"df",
"[",
"\"fwxm.mean_passed\"",
"]",
"+",
"df",
"[",
"\"fwxm.max_passed\"",
"]",
"df",
"[",
"\"shift.passed\"",
"]",
"=",
"df",
"[",
"\"shift.min_passed\"",
"]",
"+",
"df",
"[",
"\"shift.mean_passed\"",
"]",
"+",
"df",
"[",
"\"shift.max_passed\"",
"]",
"#",
"# Ergebnis in result merken",
"#",
"result",
".",
"append",
"(",
"self",
".",
"createResult",
"(",
"df",
",",
"md",
",",
"check",
",",
"df_group",
"[",
"'AcquisitionDateTime'",
"]",
".",
"iloc",
"[",
"0",
"]",
".",
"strftime",
"(",
"\"%Y%m%d\"",
")",
",",
"len",
"(",
"result",
")",
",",
"# bisherige Ergebnisse in result",
"acceptance",
")",
")",
"#",
"# Tabelle erzeugen",
"#",
"self",
".",
"pdf",
".",
"pandas",
"(",
"df",
",",
"attrs",
"=",
"{",
"\"class\"",
":",
"\"layout-fill-width\"",
",",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
",",
"fields",
"=",
"md",
"[",
"\"table_fields\"",
"]",
")",
"# Formeln anzeigen",
"text_values",
"=",
"{",
"\"fwxm warning\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"FWXMMean",
".",
"warning",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"FWXM mean\"",
")",
",",
"\"fwxm_error\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"FWXMMean",
".",
"error",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"FWXM mean\"",
")",
",",
"\"shift_warning\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"ShiftMean",
".",
"warning",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"Shift mean\"",
")",
",",
"\"shift_error\"",
":",
"md",
".",
"current",
".",
"tolerance",
".",
"ShiftMean",
".",
"error",
".",
"get",
"(",
"\"f\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\"{value}\"",
",",
"\"Shift mean\"",
")",
"}",
"text",
"=",
"\"\"\"\n Warnung bei: <b style=\"position:absolute;left:25mm;\">{fwxm warning}</b>\n <b style=\"position:absolute;left:90mm;\">{shift_warning}</b><br>\n Fehler bei: <b style=\"position:absolute;left:25mm;\">{fwxm_error}</b>\n <b style=\"position:absolute;left:90mm;\">{shift_error}</b>\n \"\"\"",
".",
"format",
"(",
"*",
"*",
"text_values",
")",
"self",
".",
"pdf",
".",
"text",
"(",
"text",
",",
"replaceNewLine",
"=",
"False",
",",
"attrs",
"=",
"{",
"\"margin-top\"",
":",
"\"5mm\"",
"}",
")",
"# Gesamt check - das schlechteste aus beiden tabelle Spalten",
"#minBoth = df[ [ \"fwxm.acceptance\", \"shift.acceptance\" ] ].min(axis=None)",
"#self.pdf.resultIcon( minBoth.min() )",
"self",
".",
"pdf",
".",
"resultIcon",
"(",
"acceptance",
")",
"#",
"# Gruppiert nach SeriesNumber abarbeiten",
"#",
"fileData",
".",
"sort_values",
"(",
"md",
"[",
"\"series_sort_values\"",
"]",
")",
".",
"groupby",
"(",
"md",
"[",
"\"series_groupby\"",
"]",
")",
".",
"apply",
"(",
"evaluate",
")",
"# abschließen pdfdaten und result zurückgeben",
"return",
"self",
".",
"pdf",
".",
"finish",
"(",
")",
",",
"result"
] | [
1993,
4
] | [
2153,
40
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
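Als kleine Skizze zur obigen Auswertung: pro Bild werden die Leaf-Fehler zu min/mean/max zusammengefasst, die anschließend gegen die Toleranzen geprüft werden. Die Werte unten sind frei erfunden; die eigentliche FWHM-Suche (FWHM_findLeafs) und check_acceptance sind nicht nachgebildet.

```python
import numpy as np

# Frei erfundene Shift-Fehler (mm) für einige Leaf-Paare eines Feldes
shift = np.array([0.02, -0.05, 0.01, 0.08, -0.03, 0.00, 0.04])
summary = {"shift.min": shift.min(), "shift.mean": shift.mean(), "shift.max": shift.max()}
print(summary)
```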
ispConfig.update | (self, config:dict={} ) | return self | Führt ein update wie bei dict.update aber mit dict_merge aus.
Parameters
----------
config : dict
In die config zu mischendes dict.
Returns
-------
self
| Führt ein update wie bei dict.update aber mit dict_merge aus. | def update(self, config:dict={} ):
"""Führt ein update wie bei dict.update aber mit dict_merge aus.
Parameters
----------
config : dict
In die config zu mischendes dict.
Returns
-------
self
"""
self._config = dict_merge(self._config, DotMap( config ) )
return self | [
"def",
"update",
"(",
"self",
",",
"config",
":",
"dict",
"=",
"{",
"}",
")",
":",
"self",
".",
"_config",
"=",
"dict_merge",
"(",
"self",
".",
"_config",
",",
"DotMap",
"(",
"config",
")",
")",
"return",
"self"
] | [
229,
4
] | [
243,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
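Zur Einordnung des obigen update(): anders als dict.update mischt dict_merge rekursiv, vorhandene Unterschlüssel bleiben erhalten. Die echte Implementierung nutzt dict_merge und DotMap aus dem Projekt; die Skizze unten ist nur ein vereinfachter, frei geschriebener Ersatz zur Illustration der Merge-Semantik.

```python
# Vereinfachter, frei geschriebener Ersatz für dict_merge (nur zur Illustration)
def deep_merge(base: dict, overlay: dict) -> dict:
    merged = dict(base)
    for key, value in overlay.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)  # rekursiv mischen
        else:
            merged[key] = value
    return merged

config = {"database": {"name": "gqa.json", "path": "/data"}}
print(deep_merge(config, {"database": {"name": "other.json"}}))
# -> {'database': {'name': 'other.json', 'path': '/data'}}  ('path' bleibt erhalten)
print({**config, "database": {"name": "other.json"}})
# -> einfaches Überschreiben würde 'path' verlieren
```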
calculate_mode | (values: dict, data: StepData) | Bestimmt den am häufigsten in einem Array vorkommenden Wert.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Bestimmt den am häufigsten in einem Array vorkommenden Wert. | def calculate_mode(values: dict, data: StepData):
"""Bestimmt den am häufigsten in einem Array vorkommenden Wert.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
new_key = get_new_keys(values, idx)
new_value = collections.Counter(value).most_common()[0][0]
data.insert_data(new_key, new_value, values) | [
"def",
"calculate_mode",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"new_value",
"=",
"collections",
".",
"Counter",
"(",
"value",
")",
".",
"most_common",
"(",
")",
"[",
"0",
"]",
"[",
"0",
"]",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
141,
0
] | [
151,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
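Kurze Skizze (frei erfundene Werte) zur oben genutzten Modus-Bestimmung über collections.Counter:

```python
import collections

values = [3, 1, 3, 2, 3, 1]
mode = collections.Counter(values).most_common()[0][0]  # häufigster Wert
print(mode)  # -> 3
```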
ispBase.getIcon | (self, acceptance) | return icon | Gibt das Icon, das der acceptance-Angabe entspricht
Parameters
----------
acceptance : int
1-5 oder 999
Returns
-------
icon : str
htmlcode des Icon
| Gibt das Icon, das der acceptance-Angabe entspricht | def getIcon(self, acceptance):
"""Gibt das Icon das der acceptance Angabe entspricht
Parameters
----------
acceptance : int
1-5 oder 999
Returns
-------
icon : str
htmlcode des Icon
"""
try:
icon = self.icons[ int(acceptance) ]
acceptance = int(acceptance)
except: # pragma: no cover
icon = self.icons[ 999 ]
return icon | [
"def",
"getIcon",
"(",
"self",
",",
"acceptance",
")",
":",
"try",
":",
"icon",
"=",
"self",
".",
"icons",
"[",
"int",
"(",
"acceptance",
")",
"]",
"acceptance",
"=",
"int",
"(",
"acceptance",
")",
"except",
":",
"# pragma: no cover",
"icon",
"=",
"self",
".",
"icons",
"[",
"999",
"]",
"return",
"icon"
] | [
635,
4
] | [
656,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
qa_mlc.picketfence_results | (self) | return {
'filename': self.infos["filename"],
'Kennung': self._kennung.format( **self.infos ),
'unit': self.infos['unit'],
'energy': self.infos['energy'],
'gantry' : self.infos['gantry'],
'collimator': self.infos['collimator'],
'checkPositions': self.pickets,
"offsets" : offsets,
"pass_pct" : pass_pct,
"abs_median_error": self.abs_median_error,
"abs_mean_error": self.abs_mean_error,
"mean_spacing" : self.pickets.mean_spacing,
"max_error": self.max_error,
"max_error_picket" : self.max_error_picket,
"max_error_leaf" :self.max_error_leaf,
"passed": self.passed,
"passed_action" : self.passed_action,
"max_from_mean" : max_from_mean_error,
"max_from_mean_leaf" : max_from_mean_leaf,
} | Gibt die Ergebnisse der Picketfence Auswertung als dict
Verwendet dabei zusätzlich eine Auswertung über error_hist (Daten für Subplot)
| Gibt die Ergebnisse der Picketfence Auswertung als dict
Verwendet dabei zusätzlich eine Auswertung über error_hist (Daten für Subplot)
| def picketfence_results(self):
"""Gibt die Ergebnisse der Picketfence Auswertung als dict
Verwendet dabei zusätzlich eine Auswertung über error_hist (Daten für Subplot)
"""
pass_pct = self.percent_passing
offsets = ' '.join('{:.1f}'.format(pk.dist2cax) for pk in self.pickets)
# mean statt np.median(np.abs(self.error_array))
self.abs_mean_error = np.mean(np.hstack([picket.error_array for picket in self.pickets]))
# max von mean bestimmen
error_plot_positions, error_means, error_stds, mlc_leaves = self.pickets.error_hist()
# pandas serie max und position bestimmen
pd_error_means = pd.Series( error_means)
max_from_mean_error = pd_error_means.max()
# FIXME: image muss up/down getauscht werden, deshalb auch die MLC Nummern ändern
max_from_mean_leaf = mlc_leaves[ pd_error_means.idxmax() ][1] - 60
return {
'filename': self.infos["filename"],
'Kennung': self._kennung.format( **self.infos ),
'unit': self.infos['unit'],
'energy': self.infos['energy'],
'gantry' : self.infos['gantry'],
'collimator': self.infos['collimator'],
'checkPositions': self.pickets,
"offsets" : offsets,
"pass_pct" : pass_pct,
"abs_median_error": self.abs_median_error,
"abs_mean_error": self.abs_mean_error,
"mean_spacing" : self.pickets.mean_spacing,
"max_error": self.max_error,
"max_error_picket" : self.max_error_picket,
"max_error_leaf" :self.max_error_leaf,
"passed": self.passed,
"passed_action" : self.passed_action,
"max_from_mean" : max_from_mean_error,
"max_from_mean_leaf" : max_from_mean_leaf,
}
"""
string = f"Picket Fence Results: \n{pass_pct:2.1f}% " \
f"Passed\nMedian Error: {self.abs_median_error:2.3f}mm \n" \
f"Mean picket spacing: {self.pickets.mean_spacing:2.1f}mm \n" \
f"Picket offsets from CAX (mm): {offsets}\n" \
f"Max Error: {self.max_error:2.3f}mm on Picket: {self.max_error_picket}, Leaf: {self.max_error_leaf}"
""" | [
"def",
"picketfence_results",
"(",
"self",
")",
":",
"pass_pct",
"=",
"self",
".",
"percent_passing",
"offsets",
"=",
"' '",
".",
"join",
"(",
"'{:.1f}'",
".",
"format",
"(",
"pk",
".",
"dist2cax",
")",
"for",
"pk",
"in",
"self",
".",
"pickets",
")",
"# mean statt np.median(np.abs(self.error_array))",
"self",
".",
"abs_mean_error",
"=",
"np",
".",
"mean",
"(",
"np",
".",
"hstack",
"(",
"[",
"picket",
".",
"error_array",
"for",
"picket",
"in",
"self",
".",
"pickets",
"]",
")",
")",
"# max von mean bestimmen",
"error_plot_positions",
",",
"error_means",
",",
"error_stds",
",",
"mlc_leaves",
"=",
"self",
".",
"pickets",
".",
"error_hist",
"(",
")",
"# pandas serie max und position bestimmen",
"pd_error_means",
"=",
"pd",
".",
"Series",
"(",
"error_means",
")",
"max_from_mean_error",
"=",
"pd_error_means",
".",
"max",
"(",
")",
"# FIXME: image muss up/down getauscht werden, deshalb auch die MLC Nummern ändern",
"max_from_mean_leaf",
"=",
"mlc_leaves",
"[",
"pd_error_means",
".",
"idxmax",
"(",
")",
"]",
"[",
"1",
"]",
"-",
"60",
"return",
"{",
"'filename'",
":",
"self",
".",
"infos",
"[",
"\"filename\"",
"]",
",",
"'Kennung'",
":",
"self",
".",
"_kennung",
".",
"format",
"(",
"*",
"*",
"self",
".",
"infos",
")",
",",
"'unit'",
":",
"self",
".",
"infos",
"[",
"'unit'",
"]",
",",
"'energy'",
":",
"self",
".",
"infos",
"[",
"'energy'",
"]",
",",
"'gantry'",
":",
"self",
".",
"infos",
"[",
"'gantry'",
"]",
",",
"'collimator'",
":",
"self",
".",
"infos",
"[",
"'collimator'",
"]",
",",
"'checkPositions'",
":",
"self",
".",
"pickets",
",",
"\"offsets\"",
":",
"offsets",
",",
"\"pass_pct\"",
":",
"pass_pct",
",",
"\"abs_median_error\"",
":",
"self",
".",
"abs_median_error",
",",
"\"abs_mean_error\"",
":",
"self",
".",
"abs_mean_error",
",",
"\"mean_spacing\"",
":",
"self",
".",
"pickets",
".",
"mean_spacing",
",",
"\"max_error\"",
":",
"self",
".",
"max_error",
",",
"\"max_error_picket\"",
":",
"self",
".",
"max_error_picket",
",",
"\"max_error_leaf\"",
":",
"self",
".",
"max_error_leaf",
",",
"\"passed\"",
":",
"self",
".",
"passed",
",",
"\"passed_action\"",
":",
"self",
".",
"passed_action",
",",
"\"max_from_mean\"",
":",
"max_from_mean_error",
",",
"\"max_from_mean_leaf\"",
":",
"max_from_mean_leaf",
",",
"}",
"\"\"\"\n string = f\"Picket Fence Results: \\n{pass_pct:2.1f}% \" \\\n f\"Passed\\nMedian Error: {self.abs_median_error:2.3f}mm \\n\" \\\n f\"Mean picket spacing: {self.pickets.mean_spacing:2.1f}mm \\n\" \\\n f\"Picket offsets from CAX (mm): {offsets}\\n\" \\\n f\"Max Error: {self.max_error:2.3f}mm on Picket: {self.max_error_picket}, Leaf: {self.max_error_leaf}\"\n \"\"\""
] | [
644,
4
] | [
689,
11
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
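Kleine Skizze zur obigen Bestimmung des maximalen mittleren Fehlers über eine pandas-Serie; die Leaf-Zuordnung über mlc_leaves und die Umrechnung mit -60 hängen von der Bildorientierung ab und sind hier weggelassen. Werte frei erfunden.

```python
import pandas as pd

# Frei erfundene mittlere Fehler (mm) pro Leaf-Position
pd_error_means = pd.Series([0.12, 0.31, 0.08, 0.22])
print(pd_error_means.max())     # größter mittlerer Fehler -> 0.31
print(pd_error_means.idxmax())  # Index dieses Fehlers -> 1
```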
IntelHexParser.__repr__ | (self) | return ''.join(buffer) | Gibt die geladene IntelHex Datei aus | Gibt die geladene IntelHex Datei aus | def __repr__(self):
""" Gibt die geladene IntelHex Datei aus """
buffer = []
if self.segments:
for segment in self.segments:
buffer.append("Startadresse: %s, Daten:\n" % segment.address)
counter = 0
for value in segment:
buffer.append("%02x " % ord(value))
counter += 1
if counter >= 26:
counter = 0
buffer.append("\n")
buffer.append("\n\n")
buffer.pop()
return ''.join(buffer) | [
"def",
"__repr__",
"(",
"self",
")",
":",
"buffer",
"=",
"[",
"]",
"if",
"self",
".",
"segments",
":",
"for",
"segment",
"in",
"self",
".",
"segments",
":",
"buffer",
".",
"append",
"(",
"\"Startadresse: %s, Daten:\\n\"",
"%",
"segment",
".",
"address",
")",
"counter",
"=",
"0",
"for",
"value",
"in",
"segment",
":",
"buffer",
".",
"append",
"(",
"\"%02x \"",
"%",
"ord",
"(",
"value",
")",
")",
"counter",
"+=",
"1",
"if",
"counter",
">=",
"26",
":",
"counter",
"=",
"0",
"buffer",
".",
"append",
"(",
"\"\\n\"",
")",
"buffer",
".",
"append",
"(",
"\"\\n\\n\"",
")",
"buffer",
".",
"pop",
"(",
")",
"return",
"''",
".",
"join",
"(",
"buffer",
")"
] | [
110,
4
] | [
128,
30
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
fetch | (values: dict, data: StepData, save_key) | Abfrage einer API und Umwandlung der API-Antwort in ein angegebenes Format.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param save_key: Key, unter dem die Antwort gespeichert wird.
| Abfrage einer API und Umwandlung der API-Antwort in ein angegebenes Format. | def fetch(values: dict, data: StepData, save_key):
"""Abfrage einer API und Umwandlung der API-Antwort in ein angegebenes Format.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:param save_key: Key, unter dem die Antwort gespeichert wird.
"""
# Build http request
req_data = _create_query(values, data)
req = requests.Request(req_data["method"], req_data["url"], headers=req_data["headers"],
json=req_data.get("json", None),
data=req_data.get("other", None), params=req_data["params"])
# Make the http request
s = requests.session()
response = s.send(req.prepare())
if not response.ok:
raise APiRequestError(response)
# Get the right return format
if req_data["res_format"].__eq__("json"):
res = response.json()
elif req_data["res_format"].__eq__("text"):
res = response.text
elif req_data["res_format"].__eq__("xml"):
res = xmltodict.parse(response.text, **req_data["xml_config"])
else:
res = response.content
if req_data["include_headers"]:
res = {"headers": response.headers, "content": res}
data.insert_data(save_key, res, values) | [
"def",
"fetch",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
",",
"save_key",
")",
":",
"# Build http request",
"req_data",
"=",
"_create_query",
"(",
"values",
",",
"data",
")",
"req",
"=",
"requests",
".",
"Request",
"(",
"req_data",
"[",
"\"method\"",
"]",
",",
"req_data",
"[",
"\"url\"",
"]",
",",
"headers",
"=",
"req_data",
"[",
"\"headers\"",
"]",
",",
"json",
"=",
"req_data",
".",
"get",
"(",
"\"json\"",
",",
"None",
")",
",",
"data",
"=",
"req_data",
".",
"get",
"(",
"\"other\"",
",",
"None",
")",
",",
"params",
"=",
"req_data",
"[",
"\"params\"",
"]",
")",
"# Make the http request",
"s",
"=",
"requests",
".",
"session",
"(",
")",
"response",
"=",
"s",
".",
"send",
"(",
"req",
".",
"prepare",
"(",
")",
")",
"if",
"not",
"response",
".",
"ok",
":",
"raise",
"APiRequestError",
"(",
"response",
")",
"# Get the right return format",
"if",
"req_data",
"[",
"\"res_format\"",
"]",
".",
"__eq__",
"(",
"\"json\"",
")",
":",
"res",
"=",
"response",
".",
"json",
"(",
")",
"elif",
"req_data",
"[",
"\"res_format\"",
"]",
".",
"__eq__",
"(",
"\"text\"",
")",
":",
"res",
"=",
"response",
".",
"text",
"elif",
"req_data",
"[",
"\"res_format\"",
"]",
".",
"__eq__",
"(",
"\"xml\"",
")",
":",
"res",
"=",
"xmltodict",
".",
"parse",
"(",
"response",
".",
"text",
",",
"*",
"*",
"req_data",
"[",
"\"xml_config\"",
"]",
")",
"else",
":",
"res",
"=",
"response",
".",
"content",
"if",
"req_data",
"[",
"\"include_headers\"",
"]",
":",
"res",
"=",
"{",
"\"headers\"",
":",
"response",
".",
"headers",
",",
"\"content\"",
":",
"res",
"}",
"data",
".",
"insert_data",
"(",
"save_key",
",",
"res",
",",
"values",
")"
] | [
168,
0
] | [
200,
43
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
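Hypothetische Mini-Skizze des oben verwendeten prepared-request-Ablaufs mit requests; URL und Parameter sind Platzhalter und kein realer Endpunkt, der Aufbau von req_data über _create_query ist nicht nachgebildet.

```python
import requests

# Platzhalter-Request (kein realer Endpunkt des Projekts)
req = requests.Request("GET", "https://example.org/api", params={"q": "demo"}, headers={})
session = requests.Session()
response = session.send(req.prepare())
if not response.ok:
    raise RuntimeError("Request fehlgeschlagen: %s" % response.status_code)
print(response.text[:200])
```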
get_specific_memory_path | (job_name: str, name: str, number: int, skip: bool) | return get_resource_path(os.path.join(MEMORY_LOCATION, job_name, name, files[number])) | Erstellt einen absoluten Pfad zu der Memory-Datei im übergebenen Ordner.
:param job_name: Name des Jobs, von dem die Funktion aufgerufen wurde.
:param name: Name des Dictionaries, das exportiert wurde.
:param number: Angabe, welche Datei ausgewählt werden soll (0= zuletzt erstellt, 1 = als vorletztes erstellt etc.)
| Erstellt einen absoluten Pfad zu der Memory-Datei im übergebenen Ordner. | def get_specific_memory_path(job_name: str, name: str, number: int, skip: bool):
"""Erstellt einen absoluten Pfad zu der Memory-Datei im übergebenen Ordner.
:param job_name: Name des Jobs, von dem die Funktion aufgerufen wurde.
:param name: Name des Dictionaries, das exportiert wurde.
:param number: Angabe, welche Datei ausgewählt werden soll (0= zuletzt erstellt, 1 = als vorletztes erstellt etc.)
"""
files = os.listdir(get_resource_path(os.path.join(MEMORY_LOCATION, job_name, name)))
files.sort(reverse=True)
if skip:
now = datetime.now()
if files[0] == now.strftime("%Y-%m-%d.json"):
number += 1
return get_resource_path(os.path.join(MEMORY_LOCATION, job_name, name, files[number])) | [
"def",
"get_specific_memory_path",
"(",
"job_name",
":",
"str",
",",
"name",
":",
"str",
",",
"number",
":",
"int",
",",
"skip",
":",
"bool",
")",
":",
"files",
"=",
"os",
".",
"listdir",
"(",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"MEMORY_LOCATION",
",",
"job_name",
",",
"name",
")",
")",
")",
"files",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"if",
"skip",
":",
"now",
"=",
"datetime",
".",
"now",
"(",
")",
"if",
"files",
"[",
"0",
"]",
"==",
"now",
".",
"strftime",
"(",
"\"%Y-%m-%d.json\"",
")",
":",
"number",
"+=",
"1",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"MEMORY_LOCATION",
",",
"job_name",
",",
"name",
",",
"files",
"[",
"number",
"]",
")",
")"
] | [
187,
0
] | [
200,
90
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
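Kleine Skizze der oben genutzten Auswahl-Logik: datumsbasierte Dateinamen sortieren lexikographisch, eine absteigende Sortierung stellt die neueste Datei an den Anfang. Dateinamen sind frei erfunden; das skip-Verhalten für die heutige Datei ist nicht nachgebildet.

```python
# Frei erfundene, datumsgestempelte Memory-Dateien
files = ["2024-05-01.json", "2024-05-03.json", "2024-05-02.json"]
files.sort(reverse=True)   # neueste Datei zuerst
number = 1                 # 0 = zuletzt erstellt, 1 = davor usw.
print(files[number])       # -> "2024-05-02.json"
```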
Line.intersect_arc | (self, arc, rtol=1e-03, atol=1e-03, include_end=False) | return arc.intersect_line(self, rtol, atol, include_end) | Von einem Line-Objekt und einem Arc-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
| Von einem Line-Objekt und einem Arc-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
| def intersect_arc(self, arc, rtol=1e-03, atol=1e-03, include_end=False):
""" Von einem Line-Objekt und einem Arc-Objekt werden die
Schnittpunkte bestimmt und in einer Liste ausgegeben.
"""
return arc.intersect_line(self, rtol, atol, include_end) | [
"def",
"intersect_arc",
"(",
"self",
",",
"arc",
",",
"rtol",
"=",
"1e-03",
",",
"atol",
"=",
"1e-03",
",",
"include_end",
"=",
"False",
")",
":",
"return",
"arc",
".",
"intersect_line",
"(",
"self",
",",
"rtol",
",",
"atol",
",",
"include_end",
")"
] | [
1065,
4
] | [
1069,
64
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
RQLQuery.rql_parse | (self, rql, limit=None) | return self._rql_where_clause | Wie rql, es wird aber nur ausgewertet und nicht die query geändert.
Parameters
----------
rql : string
rql query string.
limit : int, optional
Limit Angabe, wird hier aber nicht verwendet. The default is None.
Raises
------
NotImplementedError
Returns
-------
_rql_where_clause
| Wie rql, es wird aber nur ausgewertet und nicht die query geändert. | def rql_parse(self, rql, limit=None):
"""Wie rql, es wird aber nur ausgewertet und nicht die query geändert.
Parameters
----------
rql : string
rql query string.
limit : int, optional
Limit Angabe, wird hier aber nicht verwendet. The default is None.
Raises
------
NotImplementedError
Returns
-------
_rql_where_clause
"""
if len(self._entities) > 1: # pragma: no cover
raise NotImplementedError("Query must have a single entity")
expr = rql
if not expr:
self.rql_parsed = None
self.rql_expr = ""
else:
self.rql_expr = expr
try:
self.rql_parsed = parse(expr)
except RQLSyntaxError as exc:
raise self._rql_error_cls("RQL Syntax error: %r" % (exc.args,))
self._rql_select_clause = []
self._rql_values_clause = None
self._rql_scalar_clause = None
self._rql_where_clause = None
self._rql_order_by_clause = None
self._rql_limit_clause = None
self._rql_offset_clause = None
self._rql_one_clause = None
self._rql_distinct_clause = None
self._rql_group_by_clause = None
self._rql_joins = []
# rql auswerten
self._rql_walk(self.rql_parsed)
return self._rql_where_clause | [
"def",
"rql_parse",
"(",
"self",
",",
"rql",
",",
"limit",
"=",
"None",
")",
":",
"if",
"len",
"(",
"self",
".",
"_entities",
")",
">",
"1",
":",
"# pragma: no cover",
"raise",
"NotImplementedError",
"(",
"\"Query must have a single entity\"",
")",
"expr",
"=",
"rql",
"if",
"not",
"expr",
":",
"self",
".",
"rql_parsed",
"=",
"None",
"self",
".",
"rql_expr",
"=",
"\"\"",
"else",
":",
"self",
".",
"rql_expr",
"=",
"expr",
"try",
":",
"self",
".",
"rql_parsed",
"=",
"parse",
"(",
"expr",
")",
"except",
"RQLSyntaxError",
"as",
"exc",
":",
"raise",
"self",
".",
"_rql_error_cls",
"(",
"\"RQL Syntax error: %r\"",
"%",
"(",
"exc",
".",
"args",
",",
")",
")",
"self",
".",
"_rql_select_clause",
"=",
"[",
"]",
"self",
".",
"_rql_values_clause",
"=",
"None",
"self",
".",
"_rql_scalar_clause",
"=",
"None",
"self",
".",
"_rql_where_clause",
"=",
"None",
"self",
".",
"_rql_order_by_clause",
"=",
"None",
"self",
".",
"_rql_limit_clause",
"=",
"None",
"self",
".",
"_rql_offset_clause",
"=",
"None",
"self",
".",
"_rql_one_clause",
"=",
"None",
"self",
".",
"_rql_distinct_clause",
"=",
"None",
"self",
".",
"_rql_group_by_clause",
"=",
"None",
"self",
".",
"_rql_joins",
"=",
"[",
"]",
"# rql auswerten",
"self",
".",
"_rql_walk",
"(",
"self",
".",
"rql_parsed",
")",
"return",
"self",
".",
"_rql_where_clause"
] | [
204,
4
] | [
257,
37
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
RawData.__init__ | ( self, scriptname, settings, parent, logger, logfilepath = False, datafilespath = False, cooldown = False, counterdb = False, supressMessage = False ) | return | Initialisierung | Initialisierung | def __init__( self, scriptname, settings, parent, logger, logfilepath = False, datafilespath = False, cooldown = False, counterdb = False, supressMessage = False ):
''' Initialisierung '''
thisActionName = "__init__"
# Übernahme der Variablen für ganze Klasse
self.ClassName = "RawData"
self.Version = "2.0.3"
self.ScriptName = scriptname
self.LogFilesPath = logfilepath
self.DataFilesPath = datafilespath
self.Parent = parent
self.Settings = settings
self.Logger = logger
self.CoolDown = cooldown
self.CounterDB = counterdb
self.DataFilesPrefix = "GIVEAWAY_"
self.RawLogFile = ""
self.SubscriberLogFile = ""
self.LatestSubscriberFile = ""
self.LatestCheerFile = ""
self.CurrentSubgoalCounterFile = ""
self.GiveawayCounterFile = ""
# Sub Informationen
self.SubTypeList = list( ( "subgift", "sub", "resub" ) )
self.SubPlanList = list( ( "Prime", "1000", "2000", "3000" ) )
self.LastSubGifterUserName = ""
self.LastSubGifterCounter = int(0)
# Regex-Compile
self.reUserName = re.compile(r"^[a-z0-9][a-z0-9_]{3,24}$")
self.rePRIVMSG = re.compile(r"(@(?P<irctags>[^\ ]*)=([^;]*)(?:;|$)?\.tmi\.twitch\.tv\ PRIVMSG)")
self.reUSERNOTICE = re.compile(r"(?:^(?:@(?P<irctags>[^\ ]*)\ )?:tmi\.twitch\.tv\ USERNOTICE)")
self.reCheckArgument = re.compile(r"^(?:(?P<t1>\d{1,3})\|(?P<t2>\d{1,3})\|(?P<t3>\d{1,3}))$")
self.reJOIN = re.compile(r"(@(?P<name>[^;]*)(?:;|$)?\.tmi\.twitch\.tv\ JOIN)")
self.reWHISPER = re.compile(r"(@(?P<name>[^;]*)(?:;|$)?\.tmi\.twitch\.tv\ WHISPER)")
# Log Verzeichnis erstellen
if self.LogFilesPath:
if not os.path.exists(self.LogFilesPath):
os.makedirs(self.LogFilesPath)
# Log Files definieren
self.RawLogFile = os.path.join( self.LogFilesPath, str(myTime.FileTimeStamp()) + "_GiveawayExtention_RawData.txt" )
self.SubscriberLogFile = os.path.join( self.LogFilesPath, str(myTime.FileTimeStamp()) + "_GiveawayExtention_SubscriberData.txt" )
self.SubGifterLogFile = os.path.join( self.LogFilesPath, str(myTime.FileTimeStamp()) + "_GiveawayExtention_SubGifterData.txt" )
self.CheerLogFile = os.path.join( self.LogFilesPath, str(myTime.FileTimeStamp()) + "_GiveawayExtention_CheerData.txt" )
# Text-Data Verzeichnis erstellen
if self.DataFilesPath:
if not os.path.exists(self.DataFilesPath):
os.makedirs(self.DataFilesPath)
# Daten-Files definieren
self.LatestSubscriberFile = os.path.join( self.DataFilesPath, self.DataFilesPrefix + "LATESTSUBSCRIBER.txt")
self.LatestSubsGifterFile = os.path.join( self.DataFilesPath, self.DataFilesPrefix + "LATESTSUBGIFTER.txt")
self.LatestCheerFile = os.path.join( self.DataFilesPath, self.DataFilesPrefix + "LATESTCHEER.txt")
self.CurrentSubgoalCounterFile = os.path.join( self.DataFilesPath, self.DataFilesPrefix + "COUNTER_CURRENTSUBGOAL.txt" )
self.GiveawayCounterFile = os.path.join( self.DataFilesPath, self.DataFilesPrefix + "COUNTER_GIVEAWAYS.txt" )
# Nur ins Log schreiben, wenn nicht unterdrückt
if not supressMessage:
# Meldung ins Log schreiben
self.Logger.WriteLog(" - 'RawData'")
return | [
"def",
"__init__",
"(",
"self",
",",
"scriptname",
",",
"settings",
",",
"parent",
",",
"logger",
",",
"logfilepath",
"=",
"False",
",",
"datafilespath",
"=",
"False",
",",
"cooldown",
"=",
"False",
",",
"counterdb",
"=",
"False",
",",
"supressMessage",
"=",
"False",
")",
":",
"thisActionName",
"=",
"\"__init__\"",
"# Übernahme der Variablen für ganze Klasse\r",
"self",
".",
"ClassName",
"=",
"\"RawData\"",
"self",
".",
"Version",
"=",
"\"2.0.3\"",
"self",
".",
"ScriptName",
"=",
"scriptname",
"self",
".",
"LogFilesPath",
"=",
"logfilepath",
"self",
".",
"DataFilesPath",
"=",
"datafilespath",
"self",
".",
"Parent",
"=",
"parent",
"self",
".",
"Settings",
"=",
"settings",
"self",
".",
"Logger",
"=",
"logger",
"self",
".",
"CoolDown",
"=",
"cooldown",
"self",
".",
"CounterDB",
"=",
"counterdb",
"self",
".",
"DataFilesPrefix",
"=",
"\"GIVEAWAY_\"",
"self",
".",
"RawLogFile",
"=",
"\"\"",
"self",
".",
"SubscriberLogFile",
"=",
"\"\"",
"self",
".",
"LatestSubscriberFile",
"=",
"\"\"",
"self",
".",
"LatestCheerFile",
"=",
"\"\"",
"self",
".",
"CurrentSubgoalCounterFile",
"=",
"\"\"",
"self",
".",
"GiveawayCounterFile",
"=",
"\"\"",
"# Sub Informationen\r",
"self",
".",
"SubTypeList",
"=",
"list",
"(",
"(",
"\"subgift\"",
",",
"\"sub\"",
",",
"\"resub\"",
")",
")",
"self",
".",
"SubPlanList",
"=",
"list",
"(",
"(",
"\"Prime\"",
",",
"\"1000\"",
",",
"\"2000\"",
",",
"\"3000\"",
")",
")",
"self",
".",
"LastSubGifterUserName",
"=",
"\"\"",
"self",
".",
"LastSubGifterCounter",
"=",
"int",
"(",
"0",
")",
"# Regex-Compile\r",
"self",
".",
"reUserName",
"=",
"re",
".",
"compile",
"(",
"r\"^[a-z0-9][a-z0-9_]{3,24}$\"",
")",
"self",
".",
"rePRIVMSG",
"=",
"re",
".",
"compile",
"(",
"r\"(@(?P<irctags>[^\\ ]*)=([^;]*)(?:;|$)?\\.tmi\\.twitch\\.tv\\ PRIVMSG)\"",
")",
"self",
".",
"reUSERNOTICE",
"=",
"re",
".",
"compile",
"(",
"r\"(?:^(?:@(?P<irctags>[^\\ ]*)\\ )?:tmi\\.twitch\\.tv\\ USERNOTICE)\"",
")",
"self",
".",
"reCheckArgument",
"=",
"re",
".",
"compile",
"(",
"r\"^(?:(?P<t1>\\d{1,3})\\|(?P<t2>\\d{1,3})\\|(?P<t3>\\d{1,3}))$\"",
")",
"self",
".",
"reJOIN",
"=",
"re",
".",
"compile",
"(",
"r\"(@(?P<name>[^;]*)(?:;|$)?\\.tmi\\.twitch\\.tv\\ JOIN)\"",
")",
"self",
".",
"reWHISPER",
"=",
"re",
".",
"compile",
"(",
"r\"(@(?P<name>[^;]*)(?:;|$)?\\.tmi\\.twitch\\.tv\\ WHISPER)\"",
")",
"# Log Verzeichniss erstellen\r",
"if",
"self",
".",
"LogFilesPath",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"LogFilesPath",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"LogFilesPath",
")",
"# Log Files definieren\r",
"self",
".",
"RawLogFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"LogFilesPath",
",",
"str",
"(",
"myTime",
".",
"FileTimeStamp",
"(",
")",
")",
"+",
"\"_GiveawayExtention_RawData.txt\"",
")",
"self",
".",
"SubscriberLogFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"LogFilesPath",
",",
"str",
"(",
"myTime",
".",
"FileTimeStamp",
"(",
")",
")",
"+",
"\"_GiveawayExtention_SubscriberData.txt\"",
")",
"self",
".",
"SubGifterLogFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"LogFilesPath",
",",
"str",
"(",
"myTime",
".",
"FileTimeStamp",
"(",
")",
")",
"+",
"\"_GiveawayExtention_SubGifterData.txt\"",
")",
"self",
".",
"CheerLogFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"LogFilesPath",
",",
"str",
"(",
"myTime",
".",
"FileTimeStamp",
"(",
")",
")",
"+",
"\"_GiveawayExtention_CheerData.txt\"",
")",
"# Text-Data Verzeichniss erstellen\r",
"if",
"self",
".",
"DataFilesPath",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"DataFilesPath",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"DataFilesPath",
")",
"# Daten-Files definieren\r",
"self",
".",
"LatestSubscriberFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"DataFilesPath",
",",
"self",
".",
"DataFilesPrefix",
"+",
"\"LATESTSUBSCRIBER.txt\"",
")",
"self",
".",
"LatestSubsGifterFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"DataFilesPath",
",",
"self",
".",
"DataFilesPrefix",
"+",
"\"LATESTSUBGIFTER.txt\"",
")",
"self",
".",
"LatestCheerFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"DataFilesPath",
",",
"self",
".",
"DataFilesPrefix",
"+",
"\"LATESTCHEER.txt\"",
")",
"self",
".",
"CurrentSubgoalCounterFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"DataFilesPath",
",",
"self",
".",
"DataFilesPrefix",
"+",
"\"COUNTER_CURRENTSUBGOAL.txt\"",
")",
"self",
".",
"GiveawayCounterFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"DataFilesPath",
",",
"self",
".",
"DataFilesPrefix",
"+",
"\"COUNTER_GIVEAWAYS.txt\"",
")",
"# Nur ins Log schreiben, wenn nicht unterdrückt\r",
"if",
"not",
"supressMessage",
":",
"# Meldung ins Log schreiben\r",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" - 'RawData'\"",
")",
"return"
] | [
20,
4
] | [
86,
14
] | null | python | de | ['de', 'de', 'de'] | False | true | null |
HeistSystem.DB_create_Tables | (self) | return | Erzeuge Tabelle für statistische Zwecke | Erzeuge Tabelle für statistische Zwecke | def DB_create_Tables(self):
''' Erzeuge Tabelle für statistische Zwecke '''
thisActionName = "DB_create_Tables"
# --- Tabellen - falls notwendig - erzeugen
# Tabelle 'game_heist_gamedata' vorbereiten
sql_game_heist_gamedata = "CREATE TABLE IF NOT EXISTS game_heist_gamedata("
sql_game_heist_gamedata += "autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,"
sql_game_heist_gamedata += "gameID INTEGER NOT NULL,"
sql_game_heist_gamedata += "gameStartTimestamp DATETIME DEFAULT '',"
sql_game_heist_gamedata += "userName TEXT NOT NULL DEFAULT '',"
sql_game_heist_gamedata += "userStake INTEGER DEFAULT 0,"
sql_game_heist_gamedata += "lastChange DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP)"
# Tabelle 'game_heist_messages' vorbereiten
sql_game_heist_messages = "CREATE TABLE IF NOT EXISTS game_heist_messages("
sql_game_heist_messages += "autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,"
sql_game_heist_messages += "messageType TEXT NOT NULL DEFAULT '',"
sql_game_heist_messages += "messageText TEXT NOT NULL DEFAULT '' UNIQUE)"
# Tabelle 'game_heist_targets' vorbereiten
sql_game_heist_targets = "CREATE TABLE IF NOT EXISTS game_heist_targets("
sql_game_heist_targets += "autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,"
sql_game_heist_targets += "targetName TEXT NOT NULL DEFAULT '' UNIQUE)"
# SQL ausführen
self.GameDB.execute(sql_game_heist_gamedata)
self.GameDB.execute(sql_game_heist_messages)
self.GameDB.execute(sql_game_heist_targets)
self.GameDB.commit()
return | [
"def",
"DB_create_Tables",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"DB_create_Tables\"",
"# --- Tabellen - falls notwendig - erzeugen\r",
"# Tabelle 'game_heist_gamedata' vorbereiten\r",
"sql_game_heist_gamedata",
"=",
"\"CREATE TABLE IF NOT EXISTS game_heist_gamedata(\"",
"sql_game_heist_gamedata",
"+=",
"\"autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,\"",
"sql_game_heist_gamedata",
"+=",
"\"gameID INTEGER NOT NULL,\"",
"sql_game_heist_gamedata",
"+=",
"\"gameStartTimestamp DATETIME DEFAULT '',\"",
"sql_game_heist_gamedata",
"+=",
"\"userName TEXT NOT NULL DEFAULT '',\"",
"sql_game_heist_gamedata",
"+=",
"\"userStake INTEGER DEFAULT 0,\"",
"sql_game_heist_gamedata",
"+=",
"\"lastChange DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP)\"",
"# Tabelle 'game_heist_messages' vorbereiten\r",
"sql_game_heist_messages",
"=",
"\"CREATE TABLE IF NOT EXISTS game_heist_messages(\"",
"sql_game_heist_messages",
"+=",
"\"autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,\"",
"sql_game_heist_messages",
"+=",
"\"messageType TEXT NOT NULL DEFAULT '',\"",
"sql_game_heist_messages",
"+=",
"\"messageText TEXT NOT NULL DEFAULT '' UNIQUE)\"",
"# Tabelle 'game_heist_targets' vorbereiten\r",
"sql_game_heist_targets",
"=",
"\"CREATE TABLE IF NOT EXISTS game_heist_targets(\"",
"sql_game_heist_targets",
"+=",
"\"autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,\"",
"sql_game_heist_targets",
"+=",
"\"targetName TEXT NOT NULL DEFAULT '' UNIQUE)\"",
"# SQL ausführen\r",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql_game_heist_gamedata",
")",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql_game_heist_messages",
")",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql_game_heist_targets",
")",
"self",
".",
"GameDB",
".",
"commit",
"(",
")",
"return"
] | [
111,
4
] | [
143,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
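A minimal standalone sketch of the table-creation pattern from the record above, using the standard-library sqlite3 module; the database file name is an assumption (the record does not show how GameDB is opened), and only one of the three tables is created here.

import sqlite3

conn = sqlite3.connect("heist_game.db")  # assumed file name
conn.execute(
    "CREATE TABLE IF NOT EXISTS game_heist_targets("
    "autoID INTEGER NOT NULL UNIQUE PRIMARY KEY AUTOINCREMENT,"
    "targetName TEXT NOT NULL DEFAULT '' UNIQUE)"
)
conn.commit()
conn.close()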
ispDicom.STUDY | ( self, query:dict={} ) | return self.query( ds ) | Führt eine suche nach STUDY durch.
Wie query mit einem default Dataset
Parameters
----------
query : dict, optional
query parameter für ds. The default is {}.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
| Führt eine suche nach STUDY durch. | def STUDY( self, query:dict={} ):
"""Führt eine suche nach STUDY durch.
Wie query mit einem default Dataset
Parameters
----------
query : dict, optional
query parameter für ds. The default is {}.
Returns
-------
results : list
gefundene daten
status : hex
Rückgabecode von send_c_find::
"""
ds_model = dicomQueryDefaults["PATIENT"].copy()
ds_model.update( dicomQueryDefaults["STUDY"] )
ds_model.update( query )
ds = Dataset()
for name, value in ds_model.items():
ds.__setattr__(name, value)
# Abfrage durchführen
return self.query( ds ) | [
"def",
"STUDY",
"(",
"self",
",",
"query",
":",
"dict",
"=",
"{",
"}",
")",
":",
"ds_model",
"=",
"dicomQueryDefaults",
"[",
"\"PATIENT\"",
"]",
".",
"copy",
"(",
")",
"ds_model",
".",
"update",
"(",
"dicomQueryDefaults",
"[",
"\"STUDY\"",
"]",
")",
"ds_model",
".",
"update",
"(",
"query",
")",
"ds",
"=",
"Dataset",
"(",
")",
"for",
"name",
",",
"value",
"in",
"ds_model",
".",
"items",
"(",
")",
":",
"ds",
".",
"__setattr__",
"(",
"name",
",",
"value",
")",
"# Abfrage durchführen",
"return",
"self",
".",
"query",
"(",
"ds",
")"
] | [
730,
4
] | [
757,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
plotImage.dots2mm | ( self, point ) | return Point( self.dots2mm_X(point.x), self.dots2mm_Y(point.y) ) | Wandelt Point Angaben von dot nach mm
Parameters
----------
point : Point
| Wandelt Point Angaben von dot nach mm
Parameters
----------
point : Point
| def dots2mm( self, point ):
"""Wandelt Point Angaben von dot nach mm
Parameters
----------
point : Point
"""
return Point( self.dots2mm_X(point.x), self.dots2mm_Y(point.y) ) | [
"def",
"dots2mm",
"(",
"self",
",",
"point",
")",
":",
"return",
"Point",
"(",
"self",
".",
"dots2mm_X",
"(",
"point",
".",
"x",
")",
",",
"self",
".",
"dots2mm_Y",
"(",
"point",
".",
"y",
")",
")"
] | [
119,
4
] | [
127,
72
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
remove_from_list | (values: dict, data: StepData) | Bekommt Stopwords und wandelt die jeweiligen Wörter so um, dass Groß- und Kleinschreibung unwichtig ist.
Bekommt eine Stopword-Liste aus der Textdatei resources/stopwords/stopwords.txt und ggf. die bei der Job-Erstellung
eingegebenen wurden und wandelt die jeweiligen Wörter so um, dass Groß- und Kleinschreibung unwichtig ist.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
| Bekommt Stopwords und wandelt die jeweiligen Wörter so um, dass Groß- und Kleinschreibung unwichtig ist. | def remove_from_list(values: dict, data: StepData):
"""Bekommt Stopwords und wandelt die jeweiligen Wörter so um, dass Groß- und Kleinschreibung unwichtig ist.
Bekommt eine Stopword-Liste aus der Textdatei resources/stopwords/stopwords.txt und ggf. die bei der Job-Erstellung
eingegebenen wurden und wandelt die jeweiligen Wörter so um, dass Groß- und Kleinschreibung unwichtig ist.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
:return:
"""
for idx, key in data.loop_key(values["keys"], values):
value = data.get_data(key, values)
new_key = get_new_keys(values, idx)
to_remove = data.get_data(values.get("to_remove", []), values, list)
if data.get_data(values.get("use_stopwords", False), values, bool):
try:
file = resources.get_resource_path("stopwords/stopwords.txt")
with open(file, "r", encoding='utf-8') as f:
list_stopwords = f.read().splitlines()
to_remove = to_remove + list_stopwords
except IOError:
pass
if data.get_data(values.get("ignore_case", False), values, bool):
to_remove = [r.lower() for r in to_remove]
new_value = [v for v in value if v.lower() not in to_remove]
else:
new_value = [v for v in value if v not in to_remove]
data.insert_data(new_key, new_value, values) | [
"def",
"remove_from_list",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"for",
"idx",
",",
"key",
"in",
"data",
".",
"loop_key",
"(",
"values",
"[",
"\"keys\"",
"]",
",",
"values",
")",
":",
"value",
"=",
"data",
".",
"get_data",
"(",
"key",
",",
"values",
")",
"new_key",
"=",
"get_new_keys",
"(",
"values",
",",
"idx",
")",
"to_remove",
"=",
"data",
".",
"get_data",
"(",
"values",
".",
"get",
"(",
"\"to_remove\"",
",",
"[",
"]",
")",
",",
"values",
",",
"list",
")",
"if",
"data",
".",
"get_data",
"(",
"values",
".",
"get",
"(",
"\"use_stopwords\"",
",",
"False",
")",
",",
"values",
",",
"bool",
")",
":",
"try",
":",
"file",
"=",
"resources",
".",
"get_resource_path",
"(",
"\"stopwords/stopwords.txt\"",
")",
"with",
"open",
"(",
"file",
",",
"\"r\"",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f",
":",
"list_stopwords",
"=",
"f",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"to_remove",
"=",
"to_remove",
"+",
"list_stopwords",
"except",
"IOError",
":",
"pass",
"if",
"data",
".",
"get_data",
"(",
"values",
".",
"get",
"(",
"\"ignore_case\"",
",",
"False",
")",
",",
"values",
",",
"bool",
")",
":",
"to_remove",
"=",
"[",
"r",
".",
"lower",
"(",
")",
"for",
"r",
"in",
"to_remove",
"]",
"new_value",
"=",
"[",
"v",
"for",
"v",
"in",
"value",
"if",
"v",
".",
"lower",
"(",
")",
"not",
"in",
"to_remove",
"]",
"else",
":",
"new_value",
"=",
"[",
"v",
"for",
"v",
"in",
"value",
"if",
"v",
"not",
"in",
"to_remove",
"]",
"data",
".",
"insert_data",
"(",
"new_key",
",",
"new_value",
",",
"values",
")"
] | [
634,
0
] | [
665,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
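A minimal standalone sketch of the filtering idea used in remove_from_list above; the word lists are invented example data, and StepData/resources handling is left out.

words = ["Der", "Bundestag", "hat", "heute", "getagt"]
stopwords = ["der", "hat", "heute"]

# ignore_case=True variant: compare everything in lower case
stopwords_lower = [w.lower() for w in stopwords]
filtered = [w for w in words if w.lower() not in stopwords_lower]
print(filtered)  # ['Bundestag', 'getagt']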
get_image_path | (path: str) | return get_resource_path(os.path.join(IMAGES_LOCATION, path)) | Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION`, `IMAGE_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources/images`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
| Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource. | def get_image_path(path: str):
"""Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION`, `IMAGE_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources/images`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
"""
return get_resource_path(os.path.join(IMAGES_LOCATION, path)) | [
"def",
"get_image_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"IMAGES_LOCATION",
",",
"path",
")",
")"
] | [
98,
0
] | [
106,
65
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ImpfterminService.termin_buchen | (self) | return False | Termin wird gebucht für die Kontaktdaten, die beim Starten des
Programms eingetragen oder aus der JSON-Datei importiert wurden.
:return: bool
| Termin wird gebucht für die Kontaktdaten, die beim Starten des
Programms eingetragen oder aus der JSON-Datei importiert wurden. | def termin_buchen(self):
"""Termin wird gebucht für die Kontaktdaten, die beim Starten des
Programms eingetragen oder aus der JSON-Datei importiert wurden.
:return: bool
"""
path = "rest/buchung"
# Daten für Impftermin sammeln
data = {
"plz": self.plz_termin,
"slots": [termin.get("slotId") for termin in self.terminpaar],
"qualifikationen": self.qualifikationen,
"contact": self.kontakt
}
res = self.s.post(self.domain + path, json=data, timeout=15)
if res.status_code == 201:
msg = "Termin erfolgreich gebucht!"
self.log.success(msg)
desktop_notification(operating_system=self.operating_system, title="Terminbuchung:", message=msg)
return True
elif res.status_code == 429:
msg = "Anfrage wurde von der Botprotection geblockt. Cookies werden erneuert und die Buchung wiederholt."
self.log.error(msg)
self.renew_cookies_code()
res = self.s.post(self.domain + path, json=data, timeout=15)
if res.status_code == 201:
msg = "Termin erfolgreich gebucht!"
self.log.success(msg)
desktop_notification(operating_system=self.operating_system, title="Terminbuchung:", message=msg)
return True
else:
# Termin über Selenium Buchen
return self.book_appointment()
elif res.status_code >= 400:
data = res.json()
try:
error = data['errors']['status']
except KeyError:
error = ''
if 'nicht mehr verfügbar' in error:
msg = f"Diesen Termin gibts nicht mehr: {error}"
#Bei Terminanzahl = 1 11 Minuten warten und danach fortsetzen.
if self.termin_anzahl == 1:
msg = f"Diesen Termin gibts nicht mehr: {error}. Die Suche wird in 11 Minuten fortgesetzt"
self.log.error(msg)
time.sleep(11*60)
return False
else:
msg = f"Termin konnte nicht gebucht werden: {data}"
else:
msg = f"Unbekannter Statuscode: {res.status_code}"
self.log.error(msg)
desktop_notification(operating_system=self.operating_system, title="Terminbuchung:", message=msg)
return False | [
"def",
"termin_buchen",
"(",
"self",
")",
":",
"path",
"=",
"\"rest/buchung\"",
"# Daten für Impftermin sammeln",
"data",
"=",
"{",
"\"plz\"",
":",
"self",
".",
"plz_termin",
",",
"\"slots\"",
":",
"[",
"termin",
".",
"get",
"(",
"\"slotId\"",
")",
"for",
"termin",
"in",
"self",
".",
"terminpaar",
"]",
",",
"\"qualifikationen\"",
":",
"self",
".",
"qualifikationen",
",",
"\"contact\"",
":",
"self",
".",
"kontakt",
"}",
"res",
"=",
"self",
".",
"s",
".",
"post",
"(",
"self",
".",
"domain",
"+",
"path",
",",
"json",
"=",
"data",
",",
"timeout",
"=",
"15",
")",
"if",
"res",
".",
"status_code",
"==",
"201",
":",
"msg",
"=",
"\"Termin erfolgreich gebucht!\"",
"self",
".",
"log",
".",
"success",
"(",
"msg",
")",
"desktop_notification",
"(",
"operating_system",
"=",
"self",
".",
"operating_system",
",",
"title",
"=",
"\"Terminbuchung:\"",
",",
"message",
"=",
"msg",
")",
"return",
"True",
"elif",
"res",
".",
"status_code",
"==",
"429",
":",
"msg",
"=",
"\"Anfrage wurde von der Botprotection geblockt. Cookies werden erneuert und die Buchung wiederholt.\"",
"self",
".",
"log",
".",
"error",
"(",
"msg",
")",
"self",
".",
"renew_cookies_code",
"(",
")",
"res",
"=",
"self",
".",
"s",
".",
"post",
"(",
"self",
".",
"domain",
"+",
"path",
",",
"json",
"=",
"data",
",",
"timeout",
"=",
"15",
")",
"if",
"res",
".",
"status_code",
"==",
"201",
":",
"msg",
"=",
"\"Termin erfolgreich gebucht!\"",
"self",
".",
"log",
".",
"success",
"(",
"msg",
")",
"desktop_notification",
"(",
"operating_system",
"=",
"self",
".",
"operating_system",
",",
"title",
"=",
"\"Terminbuchung:\"",
",",
"message",
"=",
"msg",
")",
"return",
"True",
"else",
":",
"# Termin über Selenium Buchen",
"return",
"self",
".",
"book_appointment",
"(",
")",
"elif",
"res",
".",
"status_code",
">=",
"400",
":",
"data",
"=",
"res",
".",
"json",
"(",
")",
"try",
":",
"error",
"=",
"data",
"[",
"'errors'",
"]",
"[",
"'status'",
"]",
"except",
"KeyError",
":",
"error",
"=",
"''",
"if",
"'nicht mehr verfügbar' ",
"n ",
"rror:",
"",
"msg",
"=",
"f\"Diesen Termin gibts nicht mehr: {error}\"",
"#Bei Terminanzahl = 1 11 Minuten warten und danach fortsetzen.",
"if",
"self",
".",
"termin_anzahl",
"==",
"1",
":",
"msg",
"=",
"f\"Diesen Termin gibts nicht mehr: {error}. Die Suche wird in 11 Minuten fortgesetzt\"",
"self",
".",
"log",
".",
"error",
"(",
"msg",
")",
"time",
".",
"sleep",
"(",
"11",
"*",
"60",
")",
"return",
"False",
"else",
":",
"msg",
"=",
"f\"Termin konnte nicht gebucht werden: {data}\"",
"else",
":",
"msg",
"=",
"f\"Unbekannter Statuscode: {res.status_code}\"",
"self",
".",
"log",
".",
"error",
"(",
"msg",
")",
"desktop_notification",
"(",
"operating_system",
"=",
"self",
".",
"operating_system",
",",
"title",
"=",
"\"Terminbuchung:\"",
",",
"message",
"=",
"msg",
")",
"return",
"False"
] | [
638,
4
] | [
698,
20
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Scheduler.start_unblocking | (self) | Startet den Scheduler in einem neuen Thread.
Testet jede Minute, ob Jobs oder Datenquellen ausgeführt werden müssen. Ist dies der Fall, werden diese in
einem anderen Thread ausgeführt.
| Startet den Scheduler in einem neuen Thread. | def start_unblocking(self):
"""Startet den Scheduler in einem neuen Thread.
Testet jede Minute, ob Jobs oder Datenquellen ausgeführt werden müssen. Ist dies der Fall, werden diese in
einem anderen Thread ausgeführt.
"""
threading.Thread(target=self.start, daemon=True).start() | [
"def",
"start_unblocking",
"(",
"self",
")",
":",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"start",
",",
"daemon",
"=",
"True",
")",
".",
"start",
"(",
")"
] | [
154,
4
] | [
160,
64
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
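The pattern behind start_unblocking is a daemon thread around the blocking start loop; a standalone sketch follows, where the loop body is only a placeholder, not the real scheduler logic.

import threading
import time

def start():
    # placeholder loop; the real scheduler checks every minute for due jobs
    while True:
        time.sleep(60)

threading.Thread(target=start, daemon=True).start()
# the daemon flag lets the interpreter exit even while the loop is still running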
DicomImage.cropField | ( self, field ) | return self.array | Das image auf die angegebene Größe beschneiden
Dabei wird image.cax auf das neue Zentrum gesetzt
{ "X1":-200, "X2": 200, "Y1": -200, "Y2":200 }
| Das image auf die angegebene Größe beschneiden
Dabei wird image.cax auf das neue Zentrum gesetzt
{ "X1":-200, "X2": 200, "Y1": -200, "Y2":200 }
| def cropField( self, field ):
""" Das image auf die angegebene Größe beschneiden
Dabei wird image.cax auf das neue Zentrum gesetzt
{ "X1":-200, "X2": 200, "Y1": -200, "Y2":200 }
"""
self.array = self.array[
self.mm2dots_Y( field["Y1"] ) : self.mm2dots_Y(field["Y2"]),
self.mm2dots_X( field["X1"] ) : self.mm2dots_X(field["X2"])
]
#print( self.image.array.shape, self.image.array )
self.center.x = self.array.shape[0] / 2
self.center.y = self.array.shape[1] / 2
return self.array | [
"def",
"cropField",
"(",
"self",
",",
"field",
")",
":",
"self",
".",
"array",
"=",
"self",
".",
"array",
"[",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y1\"",
"]",
")",
":",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y2\"",
"]",
")",
",",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X1\"",
"]",
")",
":",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X2\"",
"]",
")",
"]",
"#print( self.image.array.shape, self.image.array )",
"self",
".",
"center",
".",
"x",
"=",
"self",
".",
"array",
".",
"shape",
"[",
"0",
"]",
"/",
"2",
"self",
".",
"center",
".",
"y",
"=",
"self",
".",
"array",
".",
"shape",
"[",
"1",
"]",
"/",
"2",
"return",
"self",
".",
"array"
] | [
653,
4
] | [
666,
25
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
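A standalone numpy sketch of the cropping step in cropField above; the pixel indices are assumed values standing in for the mm2dots_X/Y conversions.

import numpy as np

array = np.arange(100).reshape(10, 10)
y1, y2, x1, x2 = 2, 8, 3, 9        # assumed dot positions for Y1, Y2, X1, X2
cropped = array[y1:y2, x1:x2]      # rows = Y range, columns = X range
center_x = cropped.shape[0] / 2    # mirrors the record: shape[0] -> center.x
center_y = cropped.shape[1] / 2
print(cropped.shape, center_x, center_y)  # (6, 6) 3.0 3.0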
ImpfterminService.termin_suchen | (self, plz: str, zeitrahmen: dict) | return False, res.status_code | Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin, welcher im entsprechenden Zeitraum liegt (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden.
Beispiel für ein Termin-Paar:
[{
'slotId': 'slot-56817da7-3f46-4f97-9868-30a6ddabcdef',
'begin': 1616999901000,
'bsnr': '005221080'
}, {
'slotId': 'slot-d29f5c22-384c-4928-922a-30a6ddabcdef',
'begin': 1623999901000,
'bsnr': '005221080'
}]
:return: bool, status-code
| Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin, welcher im entsprechenden Zeitraum liegt (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden. | def termin_suchen(self, plz: str, zeitrahmen: dict):
"""Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin, welcher im entsprechenden Zeitraum liegt (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden.
Beispiel für ein Termin-Paar:
[{
'slotId': 'slot-56817da7-3f46-4f97-9868-30a6ddabcdef',
'begin': 1616999901000,
'bsnr': '005221080'
}, {
'slotId': 'slot-d29f5c22-384c-4928-922a-30a6ddabcdef',
'begin': 1623999901000,
'bsnr': '005221080'
}]
:return: bool, status-code
"""
path = f"rest/suche/impfterminsuche?plz={plz}"
while True:
res = self.s.get(self.domain + path, timeout=15)
if not res.ok or 'Virtueller Warteraum des Impfterminservice' not in res.text:
break
self.log.info('Warteraum... zZz...')
time.sleep(30)
if res.ok:
res_json = res.json()
terminpaare = res_json.get("termine")
self.termin_anzahl=len(terminpaare)
if terminpaare:
terminpaare_angenommen = [
tp for tp in terminpaare
if terminpaar_im_zeitrahmen(tp, zeitrahmen)
]
terminpaare_abgelehnt = [
tp for tp in terminpaare
if tp not in terminpaare_angenommen
]
impfzentrum = self.verfuegbare_impfzentren.get(plz)
zentrumsname = impfzentrum.get('Zentrumsname').strip()
ort = impfzentrum.get('Ort')
for tp_abgelehnt in terminpaare_abgelehnt:
self.log.warn(
"Termin gefunden - jedoch nicht im entsprechenden Zeitraum:")
self.log.info('-' * 50)
self.log.warn(f"'{zentrumsname}' in {plz} {ort}")
for num, termin in enumerate(tp_abgelehnt, 1):
ts = datetime.fromtimestamp(termin["begin"] / 1000).strftime(
'%d.%m.%Y um %H:%M Uhr')
self.log.warn(f"{num}. Termin: {ts}")
self.log.info('-' * 50)
if terminpaare_angenommen:
# Auswahl des erstbesten Terminpaares
self.terminpaar = choice(terminpaare_angenommen)
self.plz_termin = plz
self.log.success(f"Termin gefunden!")
self.log.success(f"'{zentrumsname}' in {plz} {ort}")
for num, termin in enumerate(self.terminpaar, 1):
ts = datetime.fromtimestamp(termin["begin"] / 1000).strftime(
'%d.%m.%Y um %H:%M Uhr')
self.log.success(f"{num}. Termin: {ts}")
if ENABLE_BEEPY:
beepy.beep('coin')
else:
print("\a")
return True, 200
else:
self.log.info(f"Keine Termine verfügbar in {plz}")
elif res.status_code == 401:
self.log.error(f"Terminpaare können nicht geladen werden: Impf-Code kann nicht für "
f"die PLZ '{plz}' verwendet werden.")
quit()
else:
self.log.error(f"Terminpaare können nicht geladen werden: {res.text}")
return False, res.status_code | [
"def",
"termin_suchen",
"(",
"self",
",",
"plz",
":",
"str",
",",
"zeitrahmen",
":",
"dict",
")",
":",
"path",
"=",
"f\"rest/suche/impfterminsuche?plz={plz}\"",
"while",
"True",
":",
"res",
"=",
"self",
".",
"s",
".",
"get",
"(",
"self",
".",
"domain",
"+",
"path",
",",
"timeout",
"=",
"15",
")",
"if",
"not",
"res",
".",
"ok",
"or",
"'Virtueller Warteraum des Impfterminservice'",
"not",
"in",
"res",
".",
"text",
":",
"break",
"self",
".",
"log",
".",
"info",
"(",
"'Warteraum... zZz...'",
")",
"time",
".",
"sleep",
"(",
"30",
")",
"if",
"res",
".",
"ok",
":",
"res_json",
"=",
"res",
".",
"json",
"(",
")",
"terminpaare",
"=",
"res_json",
".",
"get",
"(",
"\"termine\"",
")",
"self",
".",
"termin_anzahl",
"=",
"len",
"(",
"terminpaare",
")",
"if",
"terminpaare",
":",
"terminpaare_angenommen",
"=",
"[",
"tp",
"for",
"tp",
"in",
"terminpaare",
"if",
"terminpaar_im_zeitrahmen",
"(",
"tp",
",",
"zeitrahmen",
")",
"]",
"terminpaare_abgelehnt",
"=",
"[",
"tp",
"for",
"tp",
"in",
"terminpaare",
"if",
"tp",
"not",
"in",
"terminpaare_angenommen",
"]",
"impfzentrum",
"=",
"self",
".",
"verfuegbare_impfzentren",
".",
"get",
"(",
"plz",
")",
"zentrumsname",
"=",
"impfzentrum",
".",
"get",
"(",
"'Zentrumsname'",
")",
".",
"strip",
"(",
")",
"ort",
"=",
"impfzentrum",
".",
"get",
"(",
"'Ort'",
")",
"for",
"tp_abgelehnt",
"in",
"terminpaare_abgelehnt",
":",
"self",
".",
"log",
".",
"warn",
"(",
"\"Termin gefunden - jedoch nicht im entsprechenden Zeitraum:\"",
")",
"self",
".",
"log",
".",
"info",
"(",
"'-'",
"*",
"50",
")",
"self",
".",
"log",
".",
"warn",
"(",
"f\"'{zentrumsname}' in {plz} {ort}\"",
")",
"for",
"num",
",",
"termin",
"in",
"enumerate",
"(",
"tp_abgelehnt",
",",
"1",
")",
":",
"ts",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"termin",
"[",
"\"begin\"",
"]",
"/",
"1000",
")",
".",
"strftime",
"(",
"'%d.%m.%Y um %H:%M Uhr'",
")",
"self",
".",
"log",
".",
"warn",
"(",
"f\"{num}. Termin: {ts}\"",
")",
"self",
".",
"log",
".",
"info",
"(",
"'-'",
"*",
"50",
")",
"if",
"terminpaare_angenommen",
":",
"# Auswahl des erstbesten Terminpaares",
"self",
".",
"terminpaar",
"=",
"choice",
"(",
"terminpaare_angenommen",
")",
"self",
".",
"plz_termin",
"=",
"plz",
"self",
".",
"log",
".",
"success",
"(",
"f\"Termin gefunden!\"",
")",
"self",
".",
"log",
".",
"success",
"(",
"f\"'{zentrumsname}' in {plz} {ort}\"",
")",
"for",
"num",
",",
"termin",
"in",
"enumerate",
"(",
"self",
".",
"terminpaar",
",",
"1",
")",
":",
"ts",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"termin",
"[",
"\"begin\"",
"]",
"/",
"1000",
")",
".",
"strftime",
"(",
"'%d.%m.%Y um %H:%M Uhr'",
")",
"self",
".",
"log",
".",
"success",
"(",
"f\"{num}. Termin: {ts}\"",
")",
"if",
"ENABLE_BEEPY",
":",
"beepy",
".",
"beep",
"(",
"'coin'",
")",
"else",
":",
"print",
"(",
"\"\\a\"",
")",
"return",
"True",
",",
"200",
"else",
":",
"self",
".",
"log",
".",
"info",
"(",
"f\"Keine Termine verfügbar in {plz}\")",
"",
"elif",
"res",
".",
"status_code",
"==",
"401",
":",
"self",
".",
"log",
".",
"error",
"(",
"f\"Terminpaare können nicht geladen werden: Impf-Code kann nicht für \"",
"f\"die PLZ '{plz}' verwendet werden.\"",
")",
"quit",
"(",
")",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"f\"Terminpaare können nicht geladen werden: {res.text}\")",
"",
"return",
"False",
",",
"res",
".",
"status_code"
] | [
556,
4
] | [
635,
37
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
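The `begin` values in the terminpaar example are milliseconds since the epoch; a small standalone sketch of the formatting used above (the printed result depends on the local timezone).

from datetime import datetime

termin = {"begin": 1616999901000}  # value taken from the docstring example
ts = datetime.fromtimestamp(termin["begin"] / 1000).strftime('%d.%m.%Y um %H:%M Uhr')
print(ts)  # e.g. '29.03.2021 um 08:38 Uhr' when run in CEST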
ispTest.test_isp_mpdf_base | ( self ) | Ein PDF Dokument erstellen
| Ein PDF Dokument erstellen
| def test_isp_mpdf_base( self ):
''' Ein PDF Dokument erstellen
'''
response = self.app.get( "api/dummy/pdf" )
self.assertEqual(response.status_code, 400, "Status nicht 400")
self.assertEqual(
response.data,
b"Keine PDF Datei (nofile.pdf) gefunden",
"Testet Fehler bei Rückgabe eine fehlenden PDF Datei "
)
# zuerst nur ein leeres PDF mit overlay
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-1"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
self.assertEqual( response.json["data"]["body"], "", "PDF body ist nicht leer" )
self.check_pdf_data( response.json["data"], contents=0, pages=1, intern_check=True )
# text und markdown mit Header (h2)
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-2"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
# kommt es hier zu einem Fehler stimmt die css Einbindung von weasyprint nicht
self.check_pdf_data( response.json["data"], contents=1, pages=1, intern_check=True )
# wie test 2 aber markdown zuerst
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-2a"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
#print( response.json["data"] )
self.check_pdf_data( response.json["data"], contents=1, pages=1, intern_check=True )
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-3"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
self.check_pdf_data( response.json["data"], contents=2, pages=4, intern_check=True )
response = self.app.get( "api/dummy/pdf", query_string={
"name" : "test-4"
} )
self.assertEqual(response.status_code, 200, "Status nicht 200")
# kommt es hier zu einem Fehler stimmt die font Einbindung von weasyprint nicht
self.check_pdf_data( response.json["data"], contents=2, pages=3, intern_check=True ) | [
"def",
"test_isp_mpdf_base",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"400",
",",
"\"Status nicht 400\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"data",
",",
"b\"Keine PDF Datei (nofile.pdf) gefunden\"",
",",
"\"Testet Fehler bei Rückgabe eine fehlenden PDF Datei \"",
")",
"# zuerst nur ein leeres PDF mit overlay",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-1\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"body\"",
"]",
",",
"\"\"",
",",
"\"PDF body ist nicht leer\"",
")",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"0",
",",
"pages",
"=",
"1",
",",
"intern_check",
"=",
"True",
")",
"# text und markdown mit Header (h2) ",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-2\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"# kommt es hier zu einem Fehler stimmt die css Einbindung von weasyprint nicht",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"1",
",",
"pages",
"=",
"1",
",",
"intern_check",
"=",
"True",
")",
"# wie test 2 aber markdown zuerst",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-2a\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"#print( response.json[\"data\"] )",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"1",
",",
"pages",
"=",
"1",
",",
"intern_check",
"=",
"True",
")",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-3\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"2",
",",
"pages",
"=",
"4",
",",
"intern_check",
"=",
"True",
")",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dummy/pdf\"",
",",
"query_string",
"=",
"{",
"\"name\"",
":",
"\"test-4\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Status nicht 200\"",
")",
"# kommt es hier zu einem Fehler stimmt die font Einbindung von weasyprint nicht",
"self",
".",
"check_pdf_data",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"contents",
"=",
"2",
",",
"pages",
"=",
"3",
",",
"intern_check",
"=",
"True",
")"
] | [
2450,
4
] | [
2503,
92
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
shapeAusKurvenSegmentListe | (slide, kurvenSegmentListe) | return ffb.ConvertToShape() | Erstelle neues Shape auf Slide aus geschachtelter Bezierkurvenliste: [[bezier1, bezier2],[bezier3, bezier4]] | Erstelle neues Shape auf Slide aus geschachtelter Bezierkurvenliste: [[bezier1, bezier2],[bezier3, bezier4]] | def shapeAusKurvenSegmentListe(slide, kurvenSegmentListe):
'''Erstelle neues Shape auf Slide aus geschachtelter Bezierkurvenliste: [[bezier1, bezier2],[bezier3, bezier4]]'''
# erster Punkt
P = kurvenSegmentListe[0][0][0]
ffb = slide.Shapes.BuildFreeform(1, P[0], P[1])
for segment in kurvenSegmentListe:
for k in segment:
# von den naechsten Beziers immer die naechsten Punkte angeben
ffb.AddNodes(1, 3, k[1][0], k[1][1], k[2][0], k[2][1], k[3][0], k[3][1])
# Parameter: SegmentType, EditingType, X1,Y1, X2,Y2, X3,Y3
# SegmentType: 0=Line, 1=Curve
# EditingType: 0=Auto, 1=Corner (keine Verbindungspunkte), 2=Smooth, 3=Symmetric --> Zweck?
return ffb.ConvertToShape() | [
"def",
"shapeAusKurvenSegmentListe",
"(",
"slide",
",",
"kurvenSegmentListe",
")",
":",
"# erster Punkt",
"P",
"=",
"kurvenSegmentListe",
"[",
"0",
"]",
"[",
"0",
"]",
"[",
"0",
"]",
"ffb",
"=",
"slide",
".",
"Shapes",
".",
"BuildFreeform",
"(",
"1",
",",
"P",
"[",
"0",
"]",
",",
"P",
"[",
"1",
"]",
")",
"for",
"segment",
"in",
"kurvenSegmentListe",
":",
"for",
"k",
"in",
"segment",
":",
"# von den naechsten Beziers immer die naechsten Punkte angeben",
"ffb",
".",
"AddNodes",
"(",
"1",
",",
"3",
",",
"k",
"[",
"1",
"]",
"[",
"0",
"]",
",",
"k",
"[",
"1",
"]",
"[",
"1",
"]",
",",
"k",
"[",
"2",
"]",
"[",
"0",
"]",
",",
"k",
"[",
"2",
"]",
"[",
"1",
"]",
",",
"k",
"[",
"3",
"]",
"[",
"0",
"]",
",",
"k",
"[",
"3",
"]",
"[",
"1",
"]",
")",
"# Parameter: SegmentType, EditingType, X1,Y1, X2,Y2, X3,Y3",
"# SegmentType: 0=Line, 1=Curve",
"# EditingType: 0=Auto, 1=Corner (keine Verbindungspunkte), 2=Smooth, 3=Symmetric --> Zweck?",
"return",
"ffb",
".",
"ConvertToShape",
"(",
")"
] | [
117,
0
] | [
130,
31
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.DB_delete_Data | (self) | return | Lösche nicht mehr benötigte Daten aus der Datenbank | Lösche nicht mehr benötigte Daten aus der Datenbank | def DB_delete_Data(self):
''' Lösche nicht mehr benötigte Daten aus der Datenbank '''
thisActionName = "DB_delete_Data"
# Eine GameID ist vorhanden
if not (self.GameID == 0):
# Daten der letzten 5 Spiele erhalten
gameHeistOldesID = int(self.GameID) - 5
# SQL vorbereiten
sql = "DELETE FROM game_heist_gamedata WHERE gameID <= ?"
val = (int(gameHeistOldesID), )
# SQL ausführen
try:
self.GameDB.execute(sql, val)
except:
self.Logger.WriteLog(
" FEHLER: Datenbank 'MultiRaffle' - {0}".format(thisActionName))
self.Logger.WriteLog(
" --- EXCEPTION: {0}".format(str(sys.exc_info())))
return | [
"def",
"DB_delete_Data",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"DB_delete_Data\"",
"# Eine GameID ist vorhanden\r",
"if",
"not",
"(",
"self",
".",
"GameID",
"==",
"0",
")",
":",
"# Daten der letzten 5 Spiele erhalten\r",
"gameHeistOldesID",
"=",
"int",
"(",
"self",
".",
"GameID",
")",
"-",
"5",
"# SQL vorbereiten\r",
"sql",
"=",
"\"DELETE FROM game_heist_gamedata WHERE gameID <= ?\"",
"val",
"=",
"(",
"int",
"(",
"gameHeistOldesID",
")",
",",
")",
"# SQL ausführen\r",
"try",
":",
"self",
".",
"GameDB",
".",
"execute",
"(",
"sql",
",",
"val",
")",
"except",
":",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" FEHLER: Datenbank 'MultiRaffle' - {0}\"",
".",
"format",
"(",
"thisActionName",
")",
")",
"self",
".",
"Logger",
".",
"WriteLog",
"(",
"\" --- EXCEPTION: {0}\"",
".",
"format",
"(",
"str",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
")",
")",
"return"
] | [
810,
4
] | [
834,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
plotImage.mm2dots_Y | ( self, position ) | return int(round( self.dpmm * position + self.cax.y )) | Wandelt eine Y mm Angabe in die Pixel Positionen des image um.
Parameters
----------
position : int, float
Position in mm.
Returns
-------
int
Umgewandelte Position
| Wandelt eine Y mm Angabe in die Pixel Positionen des image um.
Parameters
----------
position : int, float
Position in mm. | def mm2dots_Y( self, position ):
"""Wandelt eine Y mm Angabe in die Pixel Positionen des image um.
Parameters
----------
position : int, float
Position in mm.
Returns
-------
int
Umgewandelte Position
"""
return int(round( self.dpmm * position + self.cax.y )) | [
"def",
"mm2dots_Y",
"(",
"self",
",",
"position",
")",
":",
"return",
"int",
"(",
"round",
"(",
"self",
".",
"dpmm",
"*",
"position",
"+",
"self",
".",
"cax",
".",
"y",
")",
")"
] | [
60,
4
] | [
74,
62
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
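A worked example of the conversion formula above; dpmm and cax.y are assumed values.

dpmm = 3.5        # assumed dots per millimetre
cax_y = 512       # assumed pixel row of the central axis
position = -20.0  # position in mm

pixel_y = int(round(dpmm * position + cax_y))
print(pixel_y)  # 442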
calculate_multiply | (values: dict, data: StepData) | Multipliziert gegebene Werte mit Werten, die in multiply_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Multipliziert gegebene Werte mit Werten, die in multiply_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird. | def calculate_multiply(values: dict, data: StepData):
"""Multipliziert gegebene Werte mit Werten, die in multiply_by stehen und rundet auf die gewünschte Nachkommastelle,
die unter decimal angegeben wird.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
_bi_calculate(values, data, operator.mul) | [
"def",
"calculate_multiply",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"_bi_calculate",
"(",
"values",
",",
"data",
",",
"operator",
".",
"mul",
")"
] | [
217,
0
] | [
224,
45
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
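calculate_multiply only forwards operator.mul to the shared helper _bi_calculate (not shown in this record); the underlying operator call reduces to the following.

import operator

print(operator.mul(12.5, 3))               # 37.5
print(round(operator.mul(1.23456, 2), 2))  # 2.47, rounded as the 'decimal' option would request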
init_db | (coll_args, coll_translation, directory) | Initialisiere die Argumente aus der Ursprungsdatei | Initialisiere die Argumente aus der Ursprungsdatei | def init_db(coll_args, coll_translation, directory):
"""Initialisiere die Argumente aus der Ursprungsdatei"""
trans = []
with open(os.path.join(directory, 'args-me.json'), 'r') as f_in:
data = json.load(f_in)
arguments = data['arguments']
for i, arg in tqdm(enumerate(arguments)):
trans.append({
'_id': i,
'arg_id': arguments[i].pop('id')
})
arguments[i]['_id'] = i
print(f'Insert {len(arguments)} arguments into mongo db...')
coll_args.insert_many(arguments)
coll_translation.insert_many(trans) | [
"def",
"init_db",
"(",
"coll_args",
",",
"coll_translation",
",",
"directory",
")",
":",
"trans",
"=",
"[",
"]",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"'args-me.json'",
")",
",",
"'r'",
")",
"as",
"f_in",
":",
"data",
"=",
"json",
".",
"load",
"(",
"f_in",
")",
"arguments",
"=",
"data",
"[",
"'arguments'",
"]",
"for",
"i",
",",
"arg",
"in",
"tqdm",
"(",
"enumerate",
"(",
"arguments",
")",
")",
":",
"trans",
".",
"append",
"(",
"{",
"'_id'",
":",
"i",
",",
"'arg_id'",
":",
"arguments",
"[",
"i",
"]",
".",
"pop",
"(",
"'id'",
")",
"}",
")",
"arguments",
"[",
"i",
"]",
"[",
"'_id'",
"]",
"=",
"i",
"print",
"(",
"f'Insert {len(arguments)} arguments into mongo db...'",
")",
"coll_args",
".",
"insert_many",
"(",
"arguments",
")",
"coll_translation",
".",
"insert_many",
"(",
"trans",
")"
] | [
32,
0
] | [
49,
39
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
get_image_path | (path: str) | return get_resource_path(os.path.join(IMAGES_LOCATION, path)) | Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
| Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource. | def get_image_path(path: str):
"""Erstellt einen absoluten Pfad zu der übergebenen Image-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
"""
return get_resource_path(os.path.join(IMAGES_LOCATION, path)) | [
"def",
"get_image_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"IMAGES_LOCATION",
",",
"path",
")",
")"
] | [
75,
0
] | [
83,
65
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
DicomImage.getFieldDots | ( self, field=None ) | return dots | gibt die pixelangaben für die Feldgröße
berücksichtigt dabei die Kollimator Rotation
FIXME: alle Angaben auf das ISO Zentrum beziehen (auch bei crop Angaben)
Parameters
----------
field : dict
| gibt die pixelangaben für die Feldgröße
berücksichtigt dabei die Kollimator Rotation
FIXME: alle Angaben auf das ISO Zentrum beziehen (auch bei crop Angaben)
Parameters
----------
field : dict
| def getFieldDots( self, field=None ):
""" gibt die pixelangaben für die Feldgröße
berücksichtigt dabei die Kollimator Rotation
FIXME: alle Angaben auf das ISO Zentrum beziehen (auch bei crop Angaben)
Parameters
----------
field : dict
"""
if field:
d = {
"X1" : self.mm2dots_X( field["X1"] ),
"X2" : self.mm2dots_X( field["X2"] ),
"Y1" : self.mm2dots_Y( field["Y1"] ),
"Y2" : self.mm2dots_Y( field["Y2"] ),
"X" : self.mm2dots_X( field["X2"] ) - self.mm2dots_X( field["X1"] ),
"Y" : self.mm2dots_Y( field["Y2"] ) - self.mm2dots_Y( field["Y1"] ),
}
else:
d = {
"X1" : self.mm2dots_X( self.infos["X1"] ),
"X2" : self.mm2dots_X( self.infos["X2"] ),
"Y1" : self.mm2dots_Y( self.infos["Y1"] ),
"Y2" : self.mm2dots_Y( self.infos["Y2"] ),
"X" : self.mm2dots_X( self.infos["X2"] ) - self.mm2dots_X( self.infos["X1"] ),
"Y" : self.mm2dots_Y( self.infos["Y2"] ) - self.mm2dots_Y( self.infos["Y1"] ),
}
dots = copy.deepcopy( d )
if self.infos["collimator"] == 90:
dots["X1"] = d["Y1"]
dots["X2"] = d["Y2"]
dots["Y1"] = d["X1"]
dots["Y2"] = d["X2"]
dots["X"] = d["Y"]
dots["Y"] = d["X"]
elif self.infos["collimator"] == 180:
dots["X1"] = d["X2"]
dots["X2"] = d["X1"]
dots["Y1"] = d["Y2"]
dots["Y2"] = d["Y1"]
elif self.infos["collimator"] == 270:
dots["X1"] = d["Y1"]
dots["X2"] = d["Y2"]
dots["Y1"] = d["X1"]
dots["Y2"] = d["X2"]
dots["X"] = d["Y"]
dots["Y"] = d["X"]
#print(d, dots)
return dots | [
"def",
"getFieldDots",
"(",
"self",
",",
"field",
"=",
"None",
")",
":",
"if",
"field",
":",
"d",
"=",
"{",
"\"X1\"",
":",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X1\"",
"]",
")",
",",
"\"X2\"",
":",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X2\"",
"]",
")",
",",
"\"Y1\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y1\"",
"]",
")",
",",
"\"Y2\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y2\"",
"]",
")",
",",
"\"X\"",
":",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X2\"",
"]",
")",
"-",
"self",
".",
"mm2dots_X",
"(",
"field",
"[",
"\"X1\"",
"]",
")",
",",
"\"Y\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y2\"",
"]",
")",
"-",
"self",
".",
"mm2dots_Y",
"(",
"field",
"[",
"\"Y1\"",
"]",
")",
",",
"}",
"else",
":",
"d",
"=",
"{",
"\"X1\"",
":",
"self",
".",
"mm2dots_X",
"(",
"self",
".",
"infos",
"[",
"\"X1\"",
"]",
")",
",",
"\"X2\"",
":",
"self",
".",
"mm2dots_X",
"(",
"self",
".",
"infos",
"[",
"\"X2\"",
"]",
")",
",",
"\"Y1\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"self",
".",
"infos",
"[",
"\"Y1\"",
"]",
")",
",",
"\"Y2\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"self",
".",
"infos",
"[",
"\"Y2\"",
"]",
")",
",",
"\"X\"",
":",
"self",
".",
"mm2dots_X",
"(",
"self",
".",
"infos",
"[",
"\"X2\"",
"]",
")",
"-",
"self",
".",
"mm2dots_X",
"(",
"self",
".",
"infos",
"[",
"\"X1\"",
"]",
")",
",",
"\"Y\"",
":",
"self",
".",
"mm2dots_Y",
"(",
"self",
".",
"infos",
"[",
"\"Y2\"",
"]",
")",
"-",
"self",
".",
"mm2dots_Y",
"(",
"self",
".",
"infos",
"[",
"\"Y1\"",
"]",
")",
",",
"}",
"dots",
"=",
"copy",
".",
"deepcopy",
"(",
"d",
")",
"if",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"90",
":",
"dots",
"[",
"\"X1\"",
"]",
"=",
"d",
"[",
"\"Y1\"",
"]",
"dots",
"[",
"\"X2\"",
"]",
"=",
"d",
"[",
"\"Y2\"",
"]",
"dots",
"[",
"\"Y1\"",
"]",
"=",
"d",
"[",
"\"X1\"",
"]",
"dots",
"[",
"\"Y2\"",
"]",
"=",
"d",
"[",
"\"X2\"",
"]",
"dots",
"[",
"\"X\"",
"]",
"=",
"d",
"[",
"\"Y\"",
"]",
"dots",
"[",
"\"Y\"",
"]",
"=",
"d",
"[",
"\"X\"",
"]",
"elif",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"180",
":",
"dots",
"[",
"\"X1\"",
"]",
"=",
"d",
"[",
"\"X2\"",
"]",
"dots",
"[",
"\"X2\"",
"]",
"=",
"d",
"[",
"\"X1\"",
"]",
"dots",
"[",
"\"Y1\"",
"]",
"=",
"d",
"[",
"\"Y2\"",
"]",
"dots",
"[",
"\"Y2\"",
"]",
"=",
"d",
"[",
"\"Y1\"",
"]",
"elif",
"self",
".",
"infos",
"[",
"\"collimator\"",
"]",
"==",
"270",
":",
"dots",
"[",
"\"X1\"",
"]",
"=",
"d",
"[",
"\"Y1\"",
"]",
"dots",
"[",
"\"X2\"",
"]",
"=",
"d",
"[",
"\"Y2\"",
"]",
"dots",
"[",
"\"Y1\"",
"]",
"=",
"d",
"[",
"\"X1\"",
"]",
"dots",
"[",
"\"Y2\"",
"]",
"=",
"d",
"[",
"\"X2\"",
"]",
"dots",
"[",
"\"X\"",
"]",
"=",
"d",
"[",
"\"Y\"",
"]",
"dots",
"[",
"\"Y\"",
"]",
"=",
"d",
"[",
"\"X\"",
"]",
"#print(d, dots)",
"return",
"dots"
] | [
363,
4
] | [
414,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
spanne | (verkaufspreis, selbstkosten, tax=19) | return (nettopreis - selbstkosten) / nettopreis | Berechne die Gewinnspanne für einen Verkaufspreis und einen Selbstkostenanteil | Berechne die Gewinnspanne für einen Verkaufspreis und einen Selbstkostenanteil | def spanne(verkaufspreis, selbstkosten, tax=19):
"""Berechne die Gewinnspanne für einen Verkaufspreis und einen Selbstkostenanteil"""
verkaufspreis = decimal.Decimal(str(verkaufspreis))
selbstkosten = decimal.Decimal(str(selbstkosten))
nettopreis = netto(verkaufspreis, tax=tax)
return (nettopreis - selbstkosten) / nettopreis | [
"def",
"spanne",
"(",
"verkaufspreis",
",",
"selbstkosten",
",",
"tax",
"=",
"19",
")",
":",
"verkaufspreis",
"=",
"decimal",
".",
"Decimal",
"(",
"str",
"(",
"verkaufspreis",
")",
")",
"selbstkosten",
"=",
"decimal",
".",
"Decimal",
"(",
"str",
"(",
"selbstkosten",
")",
")",
"nettopreis",
"=",
"netto",
"(",
"verkaufspreis",
",",
"tax",
"=",
"tax",
")",
"return",
"(",
"nettopreis",
"-",
"selbstkosten",
")",
"/",
"nettopreis"
] | [
114,
0
] | [
120,
51
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
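A worked example for spanne with assumed numbers; `netto` is not shown in this record and is assumed to strip the VAT, i.e. gross / (1 + tax/100).

from decimal import Decimal

verkaufspreis = Decimal("119")                 # gross selling price, 19 % VAT
selbstkosten = Decimal("60")
nettopreis = verkaufspreis / Decimal("1.19")   # -> Decimal('100')
marge = (nettopreis - selbstkosten) / nettopreis
print(marge)  # 0.4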
calculate_modulo | (values: dict, data: StepData) | Dividiert gegebene Werte durch Werte, die in divide_by stehen und gibt den Restwert zurück.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
| Dividiert gegebene Werte durch Werte, die in divide_by stehen und gibt den Restwert zurück. | def calculate_modulo(values: dict, data: StepData):
"""Dividiert gegebene Werte durch Werte, die in divide_by stehen und gibt den Restwert zurück.
:param values: Werte aus der JSON-Datei
:param data: Daten aus der API
"""
_bi_calculate(values, data, operator.mod) | [
"def",
"calculate_modulo",
"(",
"values",
":",
"dict",
",",
"data",
":",
"StepData",
")",
":",
"_bi_calculate",
"(",
"values",
",",
"data",
",",
"operator",
".",
"mod",
")"
] | [
258,
0
] | [
264,
45
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ImpfterminService.terminsuche | (self) | return False, res.status_code | Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden.
Beispiel für ein Termin-Paar:
[{
'slotId': 'slot-56817da7-3f46-4f97-9868-30a6ddabcdef',
'begin': 1616999901000,
'bsnr': '005221080'
}, {
'slotId': 'slot-d29f5c22-384c-4928-922a-30a6ddabcdef',
'begin': 1623999901000,
'bsnr': '005221080'
}]
:return: bool, status-code
| Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden. | def terminsuche(self):
"""Es wird nach einen verfügbaren Termin in der gewünschten PLZ gesucht.
Ausgewählt wird der erstbeste Termin (!).
Zurückgegeben wird das Ergebnis der Abfrage und der Status-Code.
Bei Status-Code > 400 müssen die Cookies erneuert werden.
Beispiel für ein Termin-Paar:
[{
'slotId': 'slot-56817da7-3f46-4f97-9868-30a6ddabcdef',
'begin': 1616999901000,
'bsnr': '005221080'
}, {
'slotId': 'slot-d29f5c22-384c-4928-922a-30a6ddabcdef',
'begin': 1623999901000,
'bsnr': '005221080'
}]
:return: bool, status-code
"""
path = f"rest/suche/impfterminsuche?plz={self.plz}"
while True:
res = self.s.get(self.domain + path, timeout=15)
if not res.ok or 'Virtueller Warteraum des Impfterminservice' not in res.text:
break
self.log.info('Warteraum... zZz...')
time.sleep(30)
if res.ok:
res_json = res.json()
terminpaare = res_json.get("termine")
if terminpaare:
# Auswahl des erstbesten Terminpaares
self.terminpaar = choice(terminpaare)
self.log.success("Terminpaar gefunden!")
for num, termin in enumerate(self.terminpaar, 1):
ts = datetime.fromtimestamp(termin["begin"] / 1000).strftime(
'%d.%m.%Y um %H:%M Uhr')
self.log.success(f"{num}. Termin: {ts}")
return True, 200
else:
self.log.info("Keine Termine verfügbar")
else:
self.log.error("Terminpaare können nicht geladen werden")
return False, res.status_code | [
"def",
"terminsuche",
"(",
"self",
")",
":",
"path",
"=",
"f\"rest/suche/impfterminsuche?plz={self.plz}\"",
"while",
"True",
":",
"res",
"=",
"self",
".",
"s",
".",
"get",
"(",
"self",
".",
"domain",
"+",
"path",
",",
"timeout",
"=",
"15",
")",
"if",
"not",
"res",
".",
"ok",
"or",
"'Virtueller Warteraum des Impfterminservice'",
"not",
"in",
"res",
".",
"text",
":",
"break",
"self",
".",
"log",
".",
"info",
"(",
"'Warteraum... zZz...'",
")",
"time",
".",
"sleep",
"(",
"30",
")",
"if",
"res",
".",
"ok",
":",
"res_json",
"=",
"res",
".",
"json",
"(",
")",
"terminpaare",
"=",
"res_json",
".",
"get",
"(",
"\"termine\"",
")",
"if",
"terminpaare",
":",
"# Auswahl des erstbesten Terminpaares",
"self",
".",
"terminpaar",
"=",
"choice",
"(",
"terminpaare",
")",
"self",
".",
"log",
".",
"success",
"(",
"\"Terminpaar gefunden!\"",
")",
"for",
"num",
",",
"termin",
"in",
"enumerate",
"(",
"self",
".",
"terminpaar",
",",
"1",
")",
":",
"ts",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"termin",
"[",
"\"begin\"",
"]",
"/",
"1000",
")",
".",
"strftime",
"(",
"'%d.%m.%Y um %H:%M Uhr'",
")",
"self",
".",
"log",
".",
"success",
"(",
"f\"{num}. Termin: {ts}\"",
")",
"return",
"True",
",",
"200",
"else",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Keine Termine verfügbar\")",
"",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Terminpaare können nicht geladen werden\")",
"",
"return",
"False",
",",
"res",
".",
"status_code"
] | [
264,
4
] | [
311,
37
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Vierfelder_Tafel.__init__ | (self, **kwargs) | Die Angabe von drei Wahrscheinlichkeiten der Form
Vierfeldertafel(a1 = 0.3, a1b1 = 0.15, b1 = 0.5)
führen auf eine vollständig bestimmte Vierfeldertafel.
Alle Wahrscheinlichkeiten müssen zwischen 0 und 1 liegen:
0 < a1,a1b1,b1 < 1
Zusätzlich müssen alle Wahrscheinlichkeiten der
berechneten Vierfeldertafel zwischen 0 und 1 liegen.
| Die Angabe von drei Wahrscheinlichkeiten der Form
Vierfeldertafel(a1 = 0.3, a1b1 = 0.15, b1 = 0.5)
führen auf eine vollständig bestimmte Vierfeldertafel.
Alle Wahrscheinlichkeiten müssen zwischen 0 und 1 liegen:
0 < a1,a1b1,b1 < 1
Zusätzlich müssen alle Wahrscheinlichkeiten der
berechneten Vierfeldertafel zwischen 0 und 1 liegen.
| def __init__(self, **kwargs):
''' Die Angabe von drei Wahrscheinlichkeiten der Form
Vierfeldertafel(a1 = 0.3, a1b1 = 0.15, b1 = 0.5)
führen auf eine vollständig bestimmte Vierfeldertafel.
Alle Wahrscheinlichkeiten müssen zwischen 0 und 1 liegen:
0 < a1,a1b1,b1 < 1
Zusätzlich müssen alle Wahrscheinlichkeiten der
berechneten Vierfeldertafel zwischen 0 und 1 liegen.
'''
# die gegebenen Wahrscheinlichkeiten als Gleichungen formulieren
eqns = [Eq(getattr(self,k),v) for k,v in kwargs.items()]
# das Gleichungssystem der Vierfeldertafel lösen
self.lsg = solve(eqns + self.base_eqns) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# die gegebenen Wahrscheinlichkeiten als Gleichungen formulieren",
"eqns",
"=",
"[",
"Eq",
"(",
"getattr",
"(",
"self",
",",
"k",
")",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
"]",
"# das Gleichungssystem der Vierfeldertafel lösen",
"self",
".",
"lsg",
"=",
"solve",
"(",
"eqns",
"+",
"self",
".",
"base_eqns",
")"
] | [
144,
4
] | [
163,
47
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
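A self-contained sympy sketch of the same idea; since base_eqns is defined elsewhere in the class, the standard four-field-table equations are reconstructed here as an assumption.

from sympy import symbols, Eq, solve

a1, a2, b1, b2, a1b1, a1b2, a2b1, a2b2 = symbols('a1 a2 b1 b2 a1b1 a1b2 a2b1 a2b2')
base_eqns = [
    Eq(a1 + a2, 1), Eq(b1 + b2, 1),
    Eq(a1b1 + a1b2, a1), Eq(a2b1 + a2b2, a2),
    Eq(a1b1 + a2b1, b1), Eq(a1b2 + a2b2, b2),
]
given = [Eq(a1, 0.3), Eq(a1b1, 0.15), Eq(b1, 0.5)]
print(solve(given + base_eqns))
# -> a2 = 0.7, a1b2 = 0.15, a2b1 = 0.35, a2b2 = 0.35 (plus the given values)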
ImageTaggingCNN._residual_block | (self, input, num_of_kernel) | return Activation('relu')(x) | Erzeugt ein residual Block mit Identity Shortcut
(Conv2, Batchnorm., ReLU, Conv2, Batchnorm., Add, RELU)
:tensor|list input: Die Tensoren aus vorherigen Layer
:int num_of_kernel: Die Anzahl der Kernel (Filter)
| Erzeugt ein residual Block mit Identity Shortcut
(Conv2, Batchnorm., ReLU, Conv2, Batchnorm., Add, RELU) | def _residual_block(self, input, num_of_kernel):
""" Erzeugt ein residual Block mit Identity Shortcut
(Conv2, Batchnorm., ReLU, Conv2, Batchnorm., Add, RELU)
:tensor|list input: Die Tensoren aus vorherigen Layer
:int num_of_kernel: Die Anzahl der Kernel (Filter)
"""
x = Conv2D(
filters=num_of_kernel,
kernel_size=3,
kernel_initializer='VarianceScaling',
padding='same')(input)
x = BatchNormalization(axis=1)(x)
x = Activation('relu')(x)
x = Conv2D(
filters=num_of_kernel,
kernel_size=3,
kernel_initializer='VarianceScaling',
padding='same')(x)
x = BatchNormalization(axis=1)(x)
x = Add()([x, input])
return Activation('relu')(x) | [
"def",
"_residual_block",
"(",
"self",
",",
"input",
",",
"num_of_kernel",
")",
":",
"x",
"=",
"Conv2D",
"(",
"filters",
"=",
"num_of_kernel",
",",
"kernel_size",
"=",
"3",
",",
"kernel_initializer",
"=",
"'VarianceScaling'",
",",
"padding",
"=",
"'same'",
")",
"(",
"input",
")",
"x",
"=",
"BatchNormalization",
"(",
"axis",
"=",
"1",
")",
"(",
"x",
")",
"x",
"=",
"Activation",
"(",
"'relu'",
")",
"(",
"x",
")",
"x",
"=",
"Conv2D",
"(",
"filters",
"=",
"num_of_kernel",
",",
"kernel_size",
"=",
"3",
",",
"kernel_initializer",
"=",
"'VarianceScaling'",
",",
"padding",
"=",
"'same'",
")",
"(",
"x",
")",
"x",
"=",
"BatchNormalization",
"(",
"axis",
"=",
"1",
")",
"(",
"x",
")",
"x",
"=",
"Add",
"(",
")",
"(",
"[",
"x",
",",
"input",
"]",
")",
"return",
"Activation",
"(",
"'relu'",
")",
"(",
"x",
")"
] | [
36,
4
] | [
57,
36
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
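A minimal standalone sketch of wiring such a residual block into a model with the Keras functional API; the input shape is an assumption, and the channel count of the input must match num_of_kernel so that the identity shortcut in Add() lines up.

from tensorflow.keras.layers import Input, Conv2D, BatchNormalization, Activation, Add
from tensorflow.keras.models import Model

def residual_block(x, num_of_kernel):
    y = Conv2D(num_of_kernel, 3, padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
    y = Conv2D(num_of_kernel, 3, padding='same')(y)
    y = BatchNormalization()(y)
    y = Add()([y, x])               # identity shortcut
    return Activation('relu')(y)

inputs = Input(shape=(64, 64, 32))  # 32 channels == num_of_kernel below
model = Model(inputs, residual_block(inputs, 32))
model.summary()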
ispSAFRS.access_cls | (cls, key:str="") | return None | Versucht das mit key angegebene Model zu bestimmen.
Parameters
----------
key : str
Bezeichnung des gesuchten model.
Returns
-------
None|model
Das gefundene model oder None.
| Versucht das mit key angegebene Model zu bestimmen. | def access_cls(cls, key:str=""):
"""Versucht das mit key angegebene Model zu bestimmen.
Parameters
----------
key : str
Bezeichnung des gesuchten model.
Returns
-------
None|model
Das gefundene model oder None.
"""
if hasattr(cls, "_decl_class_registry") and key in cls._decl_class_registry:
return cls._decl_class_registry[key]
elif hasattr(cls, "metadata") and key in cls.metadata.tables: # pragma: no cover
return cls.metadata.tables[key]
else:
if key in sqlalchemy.__dict__:
return sqlalchemy.__dict__[key]
return None | [
"def",
"access_cls",
"(",
"cls",
",",
"key",
":",
"str",
"=",
"\"\"",
")",
":",
"if",
"hasattr",
"(",
"cls",
",",
"\"_decl_class_registry\"",
")",
"and",
"key",
"in",
"cls",
".",
"_decl_class_registry",
":",
"return",
"cls",
".",
"_decl_class_registry",
"[",
"key",
"]",
"elif",
"hasattr",
"(",
"cls",
",",
"\"metadata\"",
")",
"and",
"key",
"in",
"cls",
".",
"metadata",
".",
"tables",
":",
"# pragma: no cover",
"return",
"cls",
".",
"metadata",
".",
"tables",
"[",
"key",
"]",
"else",
":",
"if",
"key",
"in",
"sqlalchemy",
".",
"__dict__",
":",
"return",
"sqlalchemy",
".",
"__dict__",
"[",
"key",
"]",
"return",
"None"
] | [
656,
4
] | [
677,
19
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
PdfGenerator._getPandasFields | (self, df=None, fields:list=[]) | Parameter für Pandas Felder zusammenstellen.
Parameters
----------
df : pandas.DataFrame, optional
zu untersuchendes Dataframe. The default is None.
fields : list, optional
Liste von Feldern die verwendet werden sollen. The default is [].
Returns
-------
result : dict|None
Aufbau::
{
"names" : [],
"columns" : {},
"field_format" : {},
"table_styles" : []
}
| Parameter für Pandas Felder zusammenstellen. | def _getPandasFields(self, df=None, fields:list=[]):
"""Parameter für Pandas Felder zusammenstellen.
Parameters
----------
df : pandas.DataFrame, optional
zu untersuchendes Dataframe. The default is None.
fields : list, optional
Liste von Feldern die verwendet werden sollen. The default is [].
Returns
-------
result : dict|None
Aufbau::
{
"names" : [],
"columns" : {},
"field_format" : {},
"table_styles" : []
}
"""
result = {
"names" : [],
"columns" : {},
"field_format" : {},
"table_styles" : []
}
i = -1
for f in fields:
# nur wenn field angegeben wurde
if "field" in f and f["field"] in df.columns:
i += 1
result["names"].append( f["field"] )
if "label" in f:
result["columns"][ f["field"] ] = f["label"]
else:
result["columns"][ f["field"] ] = f["field"]
if "format" in f:
result["field_format"][ result["columns"][ f["field"] ] ] = f["format"]
if "style" in f:
result["table_styles"].append( {
'selector': '.col{:d}'.format(i),
'props' : f["style"]
} )
if i > 0:
return result
else:
return None | [
"def",
"_getPandasFields",
"(",
"self",
",",
"df",
"=",
"None",
",",
"fields",
":",
"list",
"=",
"[",
"]",
")",
":",
"result",
"=",
"{",
"\"names\"",
":",
"[",
"]",
",",
"\"columns\"",
":",
"{",
"}",
",",
"\"field_format\"",
":",
"{",
"}",
",",
"\"table_styles\"",
":",
"[",
"]",
"}",
"i",
"=",
"-",
"1",
"for",
"f",
"in",
"fields",
":",
"# nur wenn field angegeben wurde",
"if",
"\"field\"",
"in",
"f",
"and",
"f",
"[",
"\"field\"",
"]",
"in",
"df",
".",
"columns",
":",
"i",
"+=",
"1",
"result",
"[",
"\"names\"",
"]",
".",
"append",
"(",
"f",
"[",
"\"field\"",
"]",
")",
"if",
"\"label\"",
"in",
"f",
":",
"result",
"[",
"\"columns\"",
"]",
"[",
"f",
"[",
"\"field\"",
"]",
"]",
"=",
"f",
"[",
"\"label\"",
"]",
"else",
":",
"result",
"[",
"\"columns\"",
"]",
"[",
"f",
"[",
"\"field\"",
"]",
"]",
"=",
"f",
"[",
"\"field\"",
"]",
"if",
"\"format\"",
"in",
"f",
":",
"result",
"[",
"\"field_format\"",
"]",
"[",
"result",
"[",
"\"columns\"",
"]",
"[",
"f",
"[",
"\"field\"",
"]",
"]",
"]",
"=",
"f",
"[",
"\"format\"",
"]",
"if",
"\"style\"",
"in",
"f",
":",
"result",
"[",
"\"table_styles\"",
"]",
".",
"append",
"(",
"{",
"'selector'",
":",
"'.col{:d}'",
".",
"format",
"(",
"i",
")",
",",
"'props'",
":",
"f",
"[",
"\"style\"",
"]",
"}",
")",
"if",
"i",
">",
"0",
":",
"return",
"result",
"else",
":",
"return",
"None"
] | [
1145,
4
] | [
1198,
23
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
HeistSystem.WriteMessage_GamePayout | (self, payoutdata) | return | Ausgabe der Auszahlung der Gewinner im Chat | Ausgabe der Auszahlung der Gewinner im Chat | def WriteMessage_GamePayout(self, payoutdata):
''' Ausgabe der Auszahlung der Gewinner im Chat '''
thisActionName = "WriteMessage_GamePayout"
tempTextOutput = ""
pointsName = self.Parent.GetCurrencyName()
# Für jeden Datensatz ausführen
for userData in payoutdata:
# Text vorbereiten
tempTextOutput += str(userData["userDisplayName"]) + " (" + \
TransformLocale_Decimals(
userData["userPayout"]) + " " + pointsName + "), "
# Ersetze letztes Komma im String
textWinner = BotHelper.ReplaceFromRight(tempTextOutput, ", ", "", 1)
# Ersetze vorletztes Komma im String
textWinner = BotHelper.ReplaceFromRight(textWinner, ", ", " und ", 1)
# Ausgabe des Textes
self.chat_WriteTextMessage(textWinner)
return | [
"def",
"WriteMessage_GamePayout",
"(",
"self",
",",
"payoutdata",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_GamePayout\"",
"tempTextOutput",
"=",
"\"\"",
"pointsName",
"=",
"self",
".",
"Parent",
".",
"GetCurrencyName",
"(",
")",
"# Für jeden Datensatz ausführen\r",
"for",
"userData",
"in",
"payoutdata",
":",
"# Text vorbereiten\r",
"tempTextOutput",
"+=",
"str",
"(",
"userData",
"[",
"\"userDisplayName\"",
"]",
")",
"+",
"\" (\"",
"+",
"TransformLocale_Decimals",
"(",
"userData",
"[",
"\"userPayout\"",
"]",
")",
"+",
"\" \"",
"+",
"pointsName",
"+",
"\"), \"",
"# Ersetze letztes Komma im String\r",
"textWinner",
"=",
"BotHelper",
".",
"ReplaceFromRight",
"(",
"tempTextOutput",
",",
"\", \"",
",",
"\"\"",
",",
"1",
")",
"# Ersetze vorletztes Komma im String\r",
"textWinner",
"=",
"BotHelper",
".",
"ReplaceFromRight",
"(",
"textWinner",
",",
"\", \"",
",",
"\" und \"",
",",
"1",
")",
"# Ausgabe des Textes\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"textWinner",
")",
"return"
] | [
1378,
4
] | [
1400,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
link | (values: dict, step_data: StepData) | Überprüft, welcher Typ der Video-Generierung vorliegt und ruft die passende Typ-Methode auf.
:param values: Werte aus der JSON-Datei
:param step_data: Daten aus der API
:return: Pfad zum Output-Video
:rtype: str
| Überprüft, welcher Typ der Video-Generierung vorliegt und ruft die passende Typ-Methode auf. | def link(values: dict, step_data: StepData):
"""Überprüft, welcher Typ der Video-Generierung vorliegt und ruft die passende Typ-Methode auf.
:param values: Werte aus der JSON-Datei
:param step_data: Daten aus der API
:return: Pfad zum Output-Video
:rtype: str
"""
out_images, out_audios, out_audio_l = [], [], []
attach_mode = step_data.get_config("attach_mode", "")
seq_func = get_type_func(values["sequence"], SEQUENCE_TYPES)
seq_func(values, step_data, out_images, out_audios, out_audio_l)
if step_data.get_config("attach", None) is not None and not attach_mode:
if not step_data.get_config("separate_rendering", False):
for item in step_data.get_config("attach", None):
pipeline = init_pipeline(step_data, step_data.data["_pipe_id"], item["steps"],
config=item.get("config", {}),
no_tmp_dir=True)
pipeline.start()
# Add images and audios from the pipeline
extend_out_config(pipeline.config["sequence"], out_images, out_audios, out_audio_l)
_generate(out_images, out_audios, out_audio_l, step_data, values)
else:
# Save and manipulate out path (to save video to tmp dir)
out_path = step_data.get_config("output_path")
step_data.data["_conf"]["output_path"] = get_relative_temp_resource_path("", step_data.data["_pipe_id"])
_generate(out_images, out_audios, out_audio_l, step_data, values)
# Resote out_path
step_data.data["_conf"]["output_path"] = out_path
sequence_out = [values["sequence"]]
for idx, item in enumerate(step_data.get_config("attach", None)):
pipeline = init_pipeline(step_data, uuid.uuid4().hex, item["steps"], idx, item.get("config", {}))
pipeline.start()
sequence_out.append(pipeline.config["sequence"])
_combine(sequence_out, step_data, values)
else:
if attach_mode == "combined":
values["sequence"] = {
"out_images": out_images,
"out_audios": out_audios,
"out_audio_l": out_audio_l
}
else:
_generate(out_images, out_audios, out_audio_l, step_data, values) | [
"def",
"link",
"(",
"values",
":",
"dict",
",",
"step_data",
":",
"StepData",
")",
":",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
"=",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
"attach_mode",
"=",
"step_data",
".",
"get_config",
"(",
"\"attach_mode\"",
",",
"\"\"",
")",
"seq_func",
"=",
"get_type_func",
"(",
"values",
"[",
"\"sequence\"",
"]",
",",
"SEQUENCE_TYPES",
")",
"seq_func",
"(",
"values",
",",
"step_data",
",",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
")",
"if",
"step_data",
".",
"get_config",
"(",
"\"attach\"",
",",
"None",
")",
"is",
"not",
"None",
"and",
"not",
"attach_mode",
":",
"if",
"not",
"step_data",
".",
"get_config",
"(",
"\"separate_rendering\"",
",",
"False",
")",
":",
"for",
"item",
"in",
"step_data",
".",
"get_config",
"(",
"\"attach\"",
",",
"None",
")",
":",
"pipeline",
"=",
"init_pipeline",
"(",
"step_data",
",",
"step_data",
".",
"data",
"[",
"\"_pipe_id\"",
"]",
",",
"item",
"[",
"\"steps\"",
"]",
",",
"config",
"=",
"item",
".",
"get",
"(",
"\"config\"",
",",
"{",
"}",
")",
",",
"no_tmp_dir",
"=",
"True",
")",
"pipeline",
".",
"start",
"(",
")",
"# Add images and audios from the pipeline",
"extend_out_config",
"(",
"pipeline",
".",
"config",
"[",
"\"sequence\"",
"]",
",",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
")",
"_generate",
"(",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
",",
"step_data",
",",
"values",
")",
"else",
":",
"# Save and manipulate out path (to save video to tmp dir)",
"out_path",
"=",
"step_data",
".",
"get_config",
"(",
"\"output_path\"",
")",
"step_data",
".",
"data",
"[",
"\"_conf\"",
"]",
"[",
"\"output_path\"",
"]",
"=",
"get_relative_temp_resource_path",
"(",
"\"\"",
",",
"step_data",
".",
"data",
"[",
"\"_pipe_id\"",
"]",
")",
"_generate",
"(",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
",",
"step_data",
",",
"values",
")",
"# Resote out_path",
"step_data",
".",
"data",
"[",
"\"_conf\"",
"]",
"[",
"\"output_path\"",
"]",
"=",
"out_path",
"sequence_out",
"=",
"[",
"values",
"[",
"\"sequence\"",
"]",
"]",
"for",
"idx",
",",
"item",
"in",
"enumerate",
"(",
"step_data",
".",
"get_config",
"(",
"\"attach\"",
",",
"None",
")",
")",
":",
"pipeline",
"=",
"init_pipeline",
"(",
"step_data",
",",
"uuid",
".",
"uuid4",
"(",
")",
".",
"hex",
",",
"item",
"[",
"\"steps\"",
"]",
",",
"idx",
",",
"item",
".",
"get",
"(",
"\"config\"",
",",
"{",
"}",
")",
")",
"pipeline",
".",
"start",
"(",
")",
"sequence_out",
".",
"append",
"(",
"pipeline",
".",
"config",
"[",
"\"sequence\"",
"]",
")",
"_combine",
"(",
"sequence_out",
",",
"step_data",
",",
"values",
")",
"else",
":",
"if",
"attach_mode",
"==",
"\"combined\"",
":",
"values",
"[",
"\"sequence\"",
"]",
"=",
"{",
"\"out_images\"",
":",
"out_images",
",",
"\"out_audios\"",
":",
"out_audios",
",",
"\"out_audio_l\"",
":",
"out_audio_l",
"}",
"else",
":",
"_generate",
"(",
"out_images",
",",
"out_audios",
",",
"out_audio_l",
",",
"step_data",
",",
"values",
")"
] | [
33,
0
] | [
87,
77
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
ispTest.test_webapp_db_tests_A | ( self ) | Api aufruf durchführen
GET /tests/
| Api aufruf durchführen
GET /tests/
| def test_webapp_db_tests_A( self ):
''' Api aufruf durchführen
GET /tests/
'''
# zuerst den zugriff testen und prüfen ob die tabelle 5 datensätze hat
#
response = self.app.get( "api/dbtests/", query_string={})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual(
len(response.json["data"]), 5, "keine 5 Datensätze"
)
#
# einen Datensatz zusätzlich einfügen
#
response = self.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({
"data" : {
"attributes": {
"string":"sechs", # Pflichtfeld
#"date":"2020-08-19",
"integer":6
},
"type":"dbtests"
}
}), follow_redirects=True)
self.assertEqual(response.status_code, 201, "Api Status nicht 201 (Created)")
self.assertEqual( response.json["data"]["id"], '6', "Datensatz id ist nicht 6")
# record merken
newRecord6 = response.json["data"]["attributes"]
id6 = response.json["data"]["id"]
link6 = response.json["data"]["links"]["self"]
#
# einen zweiten einfügen
#
response = self.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({
"data" : {
"attributes": {
"string":"sieben", # Pflichtfeld
#"date":"2020-08-19",
"integer":7
},
"type":"dbtests"
}
}), follow_redirects=True)
self.assertEqual(response.status_code, 201, "Api Status nicht 201 (Created)")
self.assertEqual( response.json["data"]["id"], '7', "Datensatz id ist nicht 7")
# record merken
newRecord7 = response.json["data"]["attributes"]
id7 = response.json["data"]["id"]
link7 = response.json["data"]["links"]["self"]
#
# jetzt alle holen und prüfen
#
response = self.app.get( "api/dbtests/")
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( len(response.json["data"]), 7, "Datensatzanzahl ist nicht 7")
id = response.json["data"][5]["id"] # zählung ab 0 (5 ist record 6)
record = response.json["data"][5]["attributes"]
link = response.json["data"][5]["links"]["self"]
self.assertEqual( id, id6, "Datensatz id=6 vom ersten stimmt nicht")
self.assertEqual( record, newRecord6, "Datensatz Inhalt vom ersten stimmt nicht")
#
# den siebten Datensatz über den angegebenen link holen
#
response = self.app.get( link7 )
self.assertEqual( response.json["data"]["id"], '7', "Datensatz Id Rückgabe ist nicht 7")
self.assertEqual( type(response.json["data"]), dict, "Datensatz data ist kein dict")
# Inhalt vergleichen
self.assertEqual( response.json["data"]["attributes"], newRecord7, "Datensatz Inhalt stimmt nicht")
#
# siebten Datensatz ändern - die id muss in body und path angegeben werden
#
response = self.app.patch( link7, headers={'Content-Type': 'application/json'}, data=json.dumps({
"data" : {
"attributes": {
# "date":"2020-08-19 00:00", # 2020-08-20, 00:00
"string":"changed",
},
"id": '7',
"type":"dbtests"
}
}), follow_redirects=True)
# 200 - Request fulfilled, document follows
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
# Inhalt darf nicht mehr gleich sein
self.assertNotEqual( response.json["data"], newRecord7, "Datensatz Inhalt ist noch gleich")
#
# den zweiten Datensatz über den angegebenen link holen und Änderungen prüfen
#
response = self.app.get( link7 )
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"]["attributes"]["string"], "changed", "Feldinhalt ist nicht changed")
# alle holen
response = self.app.get( "api/dbtests/")
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
lastCount = len(response.json["data"] )
# Datensatz 6 und 7 löschen
response = self.app.delete( link6, headers={'Content-Type': 'application/json'} )
self.assertEqual(response.status_code, 204, "Api Status nicht 204")
# alle verbleibenden holen und Anzahl prüfen
response = self.app.get( "api/dbtests/")
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual(len(response.json["data"] ), lastCount - 1 , "Api Status nicht {}".format( lastCount - 1 ))
# jetzt noch 7 löschen
response = self.app.delete( link7, headers={'Content-Type': 'application/json'} )
self.assertEqual(response.status_code, 204, "Api Status nicht 204")
# nach dem löschen Anzahl prüfen
response = self.app.get( "api/dbtests/", query_string={})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual(
len(response.json["data"]), 5, "keine 5 Datensätze nach dem löschen von 6 und 7"
)
# fehler bei falschem patch
response = self.app.patch( link7, headers={'Content-Type': 'application/json'}, data=json.dumps({
"data" : {
"attributes": {
"string_gibtsnicht":"changed",
},
"id": '99',
"type":"dbtests"
}
}), follow_redirects=True)
self.assertEqual(response.status_code, 500, "Api Status nicht 500")
self.assertEqual(
response.json["App-Error"],
[{'message': 'patch - unbekannter Fehler', 'info': '500'}],
"fehler bei falschem patch"
) | [
"def",
"test_webapp_db_tests_A",
"(",
"self",
")",
":",
"# zuerst den zugriff testen und prüfen ob die tabelle 5 datensätze hat",
"#",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/\"",
",",
"query_string",
"=",
"{",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
",",
"5",
",",
"\"keine 5 Datensätze\"",
")",
"#",
"# einen Datensatz zusätzlich einfügen",
"#",
"response",
"=",
"self",
".",
"app",
".",
"post",
"(",
"\"api/dbtests/\"",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"\"data\"",
":",
"{",
"\"attributes\"",
":",
"{",
"\"string\"",
":",
"\"sechs\"",
",",
"# Pflichtfeld",
"#\"date\":\"2020-08-19\", ",
"\"integer\"",
":",
"6",
"}",
",",
"\"type\"",
":",
"\"dbtests\"",
"}",
"}",
")",
",",
"follow_redirects",
"=",
"True",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"201",
",",
"\"Api Status nicht 201 (Created)\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"id\"",
"]",
",",
"'6'",
",",
"\"Datensatz id ist nicht 6\"",
")",
"# record merken",
"newRecord6",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"attributes\"",
"]",
"id6",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"id\"",
"]",
"link6",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"links\"",
"]",
"[",
"\"self\"",
"]",
"#",
"# einen zweiten einfügen",
"#",
"response",
"=",
"self",
".",
"app",
".",
"post",
"(",
"\"api/dbtests/\"",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"\"data\"",
":",
"{",
"\"attributes\"",
":",
"{",
"\"string\"",
":",
"\"sieben\"",
",",
"# Pflichtfeld",
"#\"date\":\"2020-08-19\", ",
"\"integer\"",
":",
"7",
"}",
",",
"\"type\"",
":",
"\"dbtests\"",
"}",
"}",
")",
",",
"follow_redirects",
"=",
"True",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"201",
",",
"\"Api Status nicht 201 (Created)\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"id\"",
"]",
",",
"'7'",
",",
"\"Datensatz id ist nicht 7\"",
")",
"# record merken",
"newRecord7",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"attributes\"",
"]",
"id7",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"id\"",
"]",
"link7",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"links\"",
"]",
"[",
"\"self\"",
"]",
"#",
"# jetzt alle holen und prüfen ",
"#",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
",",
"7",
",",
"\"Datensatzanzahl ist nicht 7\"",
")",
"id",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"5",
"]",
"[",
"\"id\"",
"]",
"# zählung ab 0 (5 ist record 6)",
"record",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"5",
"]",
"[",
"\"attributes\"",
"]",
"link",
"=",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"5",
"]",
"[",
"\"links\"",
"]",
"[",
"\"self\"",
"]",
"self",
".",
"assertEqual",
"(",
"id",
",",
"id6",
",",
"\"Datensatz id=6 vom ersten stimmt nicht\"",
")",
"self",
".",
"assertEqual",
"(",
"record",
",",
"newRecord6",
",",
"\"Datensatz Inhalt vom ersten stimmt nicht\"",
")",
"#",
"# den siebten Datensatz über den angegebenen link holen",
"#",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"link7",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"id\"",
"]",
",",
"'7'",
",",
"\"Datensatz Id Rückgabe ist nicht 7\")",
"",
"self",
".",
"assertEqual",
"(",
"type",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
",",
"dict",
",",
"\"Datensatz data ist kein dict\"",
")",
"# Inhalt vergleichen",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"attributes\"",
"]",
",",
"newRecord7",
",",
"\"Datensatz Inhalt stimmt nicht\"",
")",
"#",
"# siebten Datensatz ändern - die id muss in body und path angegeben werden",
"#",
"response",
"=",
"self",
".",
"app",
".",
"patch",
"(",
"link7",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"\"data\"",
":",
"{",
"\"attributes\"",
":",
"{",
"# \"date\":\"2020-08-19 00:00\", # 2020-08-20, 00:00",
"\"string\"",
":",
"\"changed\"",
",",
"}",
",",
"\"id\"",
":",
"'7'",
",",
"\"type\"",
":",
"\"dbtests\"",
"}",
"}",
")",
",",
"follow_redirects",
"=",
"True",
")",
"# 200 - Request fulfilled, document follows",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"# Inhalt darf nicht mehr gleich sein",
"self",
".",
"assertNotEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"newRecord7",
",",
"\"Datensatz Inhalt ist noch gleich\"",
")",
"#",
"# den zweiten Datensatz über den angegebenen link holen und Änderungen prüfen",
"#",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"link7",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
"[",
"\"attributes\"",
"]",
"[",
"\"string\"",
"]",
",",
"\"changed\"",
",",
"\"Feldinhalt ist nicht changed\"",
")",
"# alle holen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"lastCount",
"=",
"len",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
"# Datensatz 6 und 7 löschen",
"response",
"=",
"self",
".",
"app",
".",
"delete",
"(",
"link6",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"204",
",",
"\"Api Status nicht 204\"",
")",
"# alle verbleibenden holen und Anzahl prüfen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
",",
"lastCount",
"-",
"1",
",",
"\"Api Status nicht {}\"",
".",
"format",
"(",
"lastCount",
"-",
"1",
")",
")",
"# jetzt noch 7 löschen",
"response",
"=",
"self",
".",
"app",
".",
"delete",
"(",
"link7",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"204",
",",
"\"Api Status nicht 204\"",
")",
"# nach dem löschen Anzahl prüfen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/\"",
",",
"query_string",
"=",
"{",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
")",
",",
"5",
",",
"\"keine 5 Datensätze nach dem löschen von 6 und 7\"",
")",
"# fehler bei falschem patch ",
"response",
"=",
"self",
".",
"app",
".",
"patch",
"(",
"link7",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"\"data\"",
":",
"{",
"\"attributes\"",
":",
"{",
"\"string_gibtsnicht\"",
":",
"\"changed\"",
",",
"}",
",",
"\"id\"",
":",
"'99'",
",",
"\"type\"",
":",
"\"dbtests\"",
"}",
"}",
")",
",",
"follow_redirects",
"=",
"True",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"500",
",",
"\"Api Status nicht 500\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"App-Error\"",
"]",
",",
"[",
"{",
"'message'",
":",
"'patch - unbekannter Fehler'",
",",
"'info'",
":",
"'500'",
"}",
"]",
",",
"\"fehler bei falschem patch\"",
")"
] | [
1781,
4
] | [
1932,
9
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
example.test_get_type | (self) | Nicht sicher | Nicht sicher | def test_get_type(self): # test the dtypes method
obj = example()
obj.data = pd.util.testing.makeDataFrame()
self.assertEqual(obj.data.dtypes, obj.get_type())
""" Nicht sicher""" | [
"def",
"test_get_type",
"(",
"self",
")",
":",
"# test the dtypes method",
"obj",
"=",
"example",
"(",
")",
"obj",
".",
"data",
"=",
"pd",
".",
"util",
".",
"testing",
".",
"makeDataFrame",
"(",
")",
"self",
".",
"assertEqual",
"(",
"obj",
".",
"data",
".",
"dtypes",
",",
"obj",
".",
"get_type",
"(",
")",
")"
] | [
42,
4
] | [
46,
27
] | null | python | de | ['de', 'de', 'de'] | False | true | null |
|
open_resource | (path: str, mode: str = "rt") | return open(res_path, mode, encoding='utf-8') | Öffnet die übergebene Ressource.
Verwendet :func:`get_resource_path`, um den Pfad der Ressource zu bekommen.
Ist die Datei oder darüber liegende Ordner nicht vorhanden, werden diese erstellt.
:param path: Pfad zur Resource, relativ zum `resources`-Ordner.
:param mode: Mode zum Öffnen der Datei siehe :func:`open`.
:return: Die geöffnete Datei (siehe :func:`open`).
:raises: OSError
| Öffnet die übergebene Ressource. | def open_resource(path: str, mode: str = "rt"):
"""Öffnet die übergebene Ressource.
Verwendet :func:`get_resource_path`, um den Pfad der Ressource zu bekommen.
Ist die Datei oder darüber liegende Ordner nicht vorhanden, werden diese erstellt.
:param path: Pfad zur Resource, relativ zum `resources`-Ordner.
:param mode: Mode zum Öffnen der Datei siehe :func:`open`.
:return: Die geöffnete Datei (siehe :func:`open`).
:raises: OSError
"""
res_path = get_resource_path(path)
os.makedirs(os.path.dirname(res_path), exist_ok=True)
return open(res_path, mode, encoding='utf-8') | [
"def",
"open_resource",
"(",
"path",
":",
"str",
",",
"mode",
":",
"str",
"=",
"\"rt\"",
")",
":",
"res_path",
"=",
"get_resource_path",
"(",
"path",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"res_path",
")",
",",
"exist_ok",
"=",
"True",
")",
"return",
"open",
"(",
"res_path",
",",
"mode",
",",
"encoding",
"=",
"'utf-8'",
")"
] | [
230,
0
] | [
246,
49
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
init_train | (source_coll, destiny_coll, min_arg_length=25, max_args=-1) | Erstelle die Trainingsdaten-Komponente für CBOW und BM25 | Erstelle die Trainingsdaten-Komponente für CBOW und BM25 | def init_train(source_coll, destiny_coll, min_arg_length=25, max_args=-1):
"""Erstelle die Trainingsdaten-Komponente für CBOW und BM25"""
data = []
for i, arg in tqdm(enumerate(source_coll.find())):
if i == max_args:
break
nl_text = clean_to_nl(arg['premises'][0]['text'])
if len(nl_text.split()) >= min_arg_length:
data.append({
'_id': arg['_id'],
'text': clean_to_train(nl_text),
})
destiny_coll.insert_many(data) | [
"def",
"init_train",
"(",
"source_coll",
",",
"destiny_coll",
",",
"min_arg_length",
"=",
"25",
",",
"max_args",
"=",
"-",
"1",
")",
":",
"data",
"=",
"[",
"]",
"for",
"i",
",",
"arg",
"in",
"tqdm",
"(",
"enumerate",
"(",
"source_coll",
".",
"find",
"(",
")",
")",
")",
":",
"if",
"i",
"==",
"max_args",
":",
"break",
"nl_text",
"=",
"clean_to_nl",
"(",
"arg",
"[",
"'premises'",
"]",
"[",
"0",
"]",
"[",
"'text'",
"]",
")",
"if",
"len",
"(",
"nl_text",
".",
"split",
"(",
")",
")",
">=",
"min_arg_length",
":",
"data",
".",
"append",
"(",
"{",
"'_id'",
":",
"arg",
"[",
"'_id'",
"]",
",",
"'text'",
":",
"clean_to_train",
"(",
"nl_text",
")",
",",
"}",
")",
"destiny_coll",
".",
"insert_many",
"(",
"data",
")"
] | [
79,
0
] | [
94,
34
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
PdfGenerator._html | ( self, html:str="" ) | Internes anfügen von html im aktuellem pageContent.
Parameters
----------
html : str, optional
Anzufügender Inhalt. The default is "".
Returns
-------
None.
| Internes anfügen von html im aktuellem pageContent. | def _html( self, html:str="" ):
"""Internes anfügen von html im aktuellem pageContent.
Parameters
----------
html : str, optional
Anzufügender Inhalt. The default is "".
Returns
-------
None.
"""
if not self.contentName in self.pageContent:
self.pageContent[ self.contentName ] = ""
self.pageContent[ self.contentName ] += html | [
"def",
"_html",
"(",
"self",
",",
"html",
":",
"str",
"=",
"\"\"",
")",
":",
"if",
"not",
"self",
".",
"contentName",
"in",
"self",
".",
"pageContent",
":",
"self",
".",
"pageContent",
"[",
"self",
".",
"contentName",
"]",
"=",
"\"\"",
"self",
".",
"pageContent",
"[",
"self",
".",
"contentName",
"]",
"+=",
"html"
] | [
806,
4
] | [
822,
52
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
get_audio_path | (path: str) | return get_resource_path(os.path.join(AUDIO_LOCATION, path)) | Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
| Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource. | def get_audio_path(path: str):
"""Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource.
Erstellt den Pfad aus `RESOURCES_LOCATION` und dem übergebenen Pfad.
:param path: Pfad zur Ressource, relativ zum `resources`-Ordner.
:return: Absoluter Pfad zur übergebenen Ressource.
"""
return get_resource_path(os.path.join(AUDIO_LOCATION, path)) | [
"def",
"get_audio_path",
"(",
"path",
":",
"str",
")",
":",
"return",
"get_resource_path",
"(",
"os",
".",
"path",
".",
"join",
"(",
"AUDIO_LOCATION",
",",
"path",
")",
")"
] | [
86,
0
] | [
94,
64
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
suche | (cls, wort: str, limit: int = None) | return findall(r'data-rhyme="(\w+(?#Reimwort))"', content)[:abs(limit)] | Sucht nach Reimwörter des gesuchten Wortes
Args:
wort (str): Das Wort wonach Reime gesucht werden.
limit (int, optional): Ein Limit, wie groß die
die Liste mit gefundenen Reimwörtern sein
darf. Wenn None, dann gibt es keine Grenze.
Raises:
Exception: Fehler beim Laden der Website.
Returns:
List[str]: Eine Liste mit gefundenen Reimwörtern.
| Sucht nach Reimwörter des gesuchten Wortes | def suche(cls, wort: str, limit: int = None) -> List[str]:
"""Sucht nach Reimwörter des gesuchten Wortes
Args:
wort (str): Das Wort wonach Reime gesucht werden.
limit (int, optional): Ein Limit, wie groß die
die Liste mit gefundenen Reimwörtern sein
darf. Wenn None, dann gibt es keine Grenze.
Raises:
Exception: Fehler beim Laden der Website.
Returns:
List[str]: Eine Liste mit gefundenen Reimwörtern.
"""
if not isinstance(wort, str):
raise ValueError("WARNUNG: Paramter 'wort' ist kein String!")
if not isinstance(limit, int) and limit is not None:
raise ValueError("WARNUNG: Paramter 'limit' ist kein Integer!")
content = cls._get_html_code(wort.lower().replace("ß", "ss"))
if limit is None:
return findall(r'data-rhyme="(\w+(?#Reimwort))"', content)
return findall(r'data-rhyme="(\w+(?#Reimwort))"', content)[:abs(limit)] | [
"def",
"suche",
"(",
"cls",
",",
"wort",
":",
"str",
",",
"limit",
":",
"int",
"=",
"None",
")",
"->",
"List",
"[",
"str",
"]",
":",
"if",
"not",
"isinstance",
"(",
"wort",
",",
"str",
")",
":",
"raise",
"ValueError",
"(",
"\"WARNUNG: Paramter 'wort' ist kein String!\"",
")",
"if",
"not",
"isinstance",
"(",
"limit",
",",
"int",
")",
"and",
"limit",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"WARNUNG: Paramter 'limit' ist kein Integer!\"",
")",
"content",
"=",
"cls",
".",
"_get_html_code",
"(",
"wort",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"\"ß\",",
" ",
"ss\")",
")",
"",
"if",
"limit",
"is",
"None",
":",
"return",
"findall",
"(",
"r'data-rhyme=\"(\\w+(?#Reimwort))\"'",
",",
"content",
")",
"return",
"findall",
"(",
"r'data-rhyme=\"(\\w+(?#Reimwort))\"'",
",",
"content",
")",
"[",
":",
"abs",
"(",
"limit",
")",
"]"
] | [
20,
4
] | [
42,
79
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
HeistSystem.WriteMessage_GameCooldownOver | (self) | return | Vorbereiten der Cooldown-Abgelaufen-Nachricht zur Ausgabe in den Chat | Vorbereiten der Cooldown-Abgelaufen-Nachricht zur Ausgabe in den Chat | def WriteMessage_GameCooldownOver(self):
''' Vorbereiten der Cooldown-Abgelaufen-Nachricht zur Ausgabe in den Chat '''
thisActionName = "WriteMessage_GameCooldownOver"
# Benachrichtigung aus der Datenbank auslesen
messageText = self.RandomMessage_ByType(
messageType=self.MessageType_GameCooldownOver
)
# Nachricht in den Chat schreiben
self.chat_WriteTextMessage(
messageText=str(messageText).format(
command=self.Settings.Game_Command
)
)
return | [
"def",
"WriteMessage_GameCooldownOver",
"(",
"self",
")",
":",
"thisActionName",
"=",
"\"WriteMessage_GameCooldownOver\"",
"# Benachrichtigung aus der Datenbank auslesen\r",
"messageText",
"=",
"self",
".",
"RandomMessage_ByType",
"(",
"messageType",
"=",
"self",
".",
"MessageType_GameCooldownOver",
")",
"# Nachricht in den Chat schreiben\r",
"self",
".",
"chat_WriteTextMessage",
"(",
"messageText",
"=",
"str",
"(",
"messageText",
")",
".",
"format",
"(",
"command",
"=",
"self",
".",
"Settings",
".",
"Game_Command",
")",
")",
"return"
] | [
1116,
4
] | [
1132,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
Monat.monatscode | (self) | return self.__monatscode | Der fuer die Wochentagsberechnung erforderliche Monatscode | Der fuer die Wochentagsberechnung erforderliche Monatscode | def monatscode(self) -> Monatscode:
"""Der fuer die Wochentagsberechnung erforderliche Monatscode"""
return self.__monatscode | [
"def",
"monatscode",
"(",
"self",
")",
"->",
"Monatscode",
":",
"return",
"self",
".",
"__monatscode"
] | [
65,
4
] | [
67,
32
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ispTest.test_webapp_db_group | ( self ) | Api aufruf für relative Tabellen
# ohne group Angabe wird fields verwendet
/api/<modul>/groupby?fields[<modul>]=<feld1>
# mit group
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups=<feld1,feld2>
# mit group und delimiter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups[<modul>]=<feld1,feld2>&delimiter=,
# mit Filter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&filter=eq(aktiv,true)
# mit labels
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&labels={"dbtests.gruppe": "Hallo"}
| Api aufruf für relative Tabellen
# ohne group Angabe wird fields verwendet
/api/<modul>/groupby?fields[<modul>]=<feld1>
# mit group
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups=<feld1,feld2>
# mit group und delimiter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups[<modul>]=<feld1,feld2>&delimiter=,
# mit Filter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&filter=eq(aktiv,true)
# mit labels
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&labels={"dbtests.gruppe": "Hallo"}
| def test_webapp_db_group( self ):
''' Api aufruf für relative Tabellen
# ohne group Angabe wird fields verwendet
/api/<modul>/groupby?fields[<modul>]=<feld1>
# mit group
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups=<feld1,feld2>
# mit group und delimiter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups[<modul>]=<feld1,feld2>&delimiter=,
# mit Filter
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&filter=eq(aktiv,true)
# mit labels
/api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&labels={"dbtests.gruppe": "Hallo"}
'''
# mit fields Angabe
response = self.app.get( "api/dbtests/groupby", query_string={
"fields[dbtests]":"gruppe"
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],[
{'attributes': {'hasChildren': 1, 'gruppe': 'A'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'gruppe': 'B'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'gruppe': 'C'}, 'id': None, 'type': 'dbtests'}
], "groupby mit fields Angabe Rückgabe fehlerhaft " )
# mit groups Angabe
response = self.app.get( "api/dbtests/groupby", query_string={
"groups":"gruppe"
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],[
{'attributes': {'hasChildren': 1, 'gruppe': 'A'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'gruppe': 'B'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'gruppe': 'C'}, 'id': None, 'type': 'dbtests'}
], "groupby mit groups Angabe Rückgabe fehlerhaft " )
# mit Filter und zwei Gruppierungs Feldern
response = self.app.get( "api/dbtests/groupby", query_string={
"groups[dbtests]":"gruppe,tags",
"filter":"eq(aktiv,true)"
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],[
{'attributes': {'gruppe': 'A', 'hasChildren': 1, 'tags': 'A,K'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'gruppe': 'B', 'hasChildren': 1, 'tags': 'A,K'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'gruppe': 'C', 'hasChildren': 1, 'tags': 'M,K'}, 'id': None, 'type': 'dbtests'}
], "groupby mit Filter und zwei Gruppierungs Feldern fehlerhaft " )
# mit delimiter
response = self.app.get( "api/dbtests/groupby", query_string={
"groups":"tags",
"delimiter": ","
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],[
{'attributes': {'tags': 'A'}},
{'attributes': {'tags': 'B'}},
{'attributes': {'tags': 'K'}},
{'attributes': {'tags': 'M'}}
], "groupby mit delimiter Rückgabe fehlerhaft " )
# groupby mit label testen
response = self.app.get( "api/dbtests/groupby", query_string={
"groups":"gruppe",
"labels": '{"dbtests.gruppe": "lGruppe"}'
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],
[
{'attributes': {'hasChildren': 1, 'lGruppe': 'A'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppe': 'B'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppe': 'C'}, 'id': None, 'type': 'dbtests'}
]
, "groupby mit label fehlerhaft " )
# groupby mit zweifachen label testen
response = self.app.get( "api/dbtests/groupby", query_string={
"groups":"gruppe",
"labels": '{"dbtests.gruppe": ["lGruppeA", "lGruppeB"]}'
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],
[
{'attributes': {'hasChildren': 1, 'lGruppeA': 'A', 'lGruppeB': 'A'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppeA': 'B', 'lGruppeB': 'B'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppeA': 'C', 'lGruppeB': 'C'}, 'id': None, 'type': 'dbtests'}
]
, "groupby mit label fehlerhaft " )
# groupby mit fields und label testen
response = self.app.get( "api/dbtests/groupby", query_string={
"fields[dbtests]":"gruppe",
"labels": '{"dbtests.gruppe": "lGruppe"}'
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual(response.json["data"],
[
{'attributes': {'lGruppe': 'A', 'hasChildren': 1}, 'id': None, 'type': 'dbtests'},
{'attributes': {'lGruppe': 'B', 'hasChildren': 2}, 'id': None, 'type': 'dbtests'},
{'attributes': {'lGruppe': 'C', 'hasChildren': 2}, 'id': None, 'type': 'dbtests'}
]
, "groupby mit fields und label fehlerhaft" )
# groupby mit fields und zweifachen label testen
response = self.app.get( "api/dbtests/groupby", query_string={
"fields[dbtests]":"gruppe",
"labels": '{"dbtests.gruppe": ["lGruppeA", "lGruppeB"]}'
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"],
[
{'attributes': {'hasChildren': 1, 'lGruppeA': 'A', 'lGruppeB': 'A'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppeA': 'B', 'lGruppeB': 'B'}, 'id': None, 'type': 'dbtests'},
{'attributes': {'hasChildren': 2, 'lGruppeA': 'C', 'lGruppeB': 'C'}, 'id': None, 'type': 'dbtests'}
]
, "groupby mit fields und label fehlerhaft" )
# id als gruppe wird ausgefiltert
response = self.app.get( "api/dbtests/groupby", query_string={
"groups":"id"
})
self.assertEqual(response.status_code, 200, "Api Status nicht 200")
self.assertEqual( response.json["data"], [
{'attributes': {'hasChildren': 1}, 'id': 1, 'type': 'dbtests'},
{'attributes': {'hasChildren': 1}, 'id': 2, 'type': 'dbtests'},
{'attributes': {'hasChildren': 1}, 'id': 3, 'type': 'dbtests'},
{'attributes': {'hasChildren': 1}, 'id': 4, 'type': 'dbtests'},
{'attributes': {'hasChildren': 1}, 'id': 5, 'type': 'dbtests'}
] , "id als gruppe wird ausgefiltert" ) | [
"def",
"test_webapp_db_group",
"(",
"self",
")",
":",
"# mit fields Angabe",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"fields[dbtests]\"",
":",
"\"gruppe\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
",",
"'gruppe'",
":",
"'A'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'gruppe'",
":",
"'B'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'gruppe'",
":",
"'C'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit fields Angabe Rückgabe fehlerhaft \" ",
"",
"# mit groups Angabe",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups\"",
":",
"\"gruppe\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
",",
"'gruppe'",
":",
"'A'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'gruppe'",
":",
"'B'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'gruppe'",
":",
"'C'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit groups Angabe Rückgabe fehlerhaft \" ",
"",
"# mit Filter und zwei Gruppierungs Feldern",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups[dbtests]\"",
":",
"\"gruppe,tags\"",
",",
"\"filter\"",
":",
"\"eq(aktiv,true)\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'gruppe'",
":",
"'A'",
",",
"'hasChildren'",
":",
"1",
",",
"'tags'",
":",
"'A,K'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'gruppe'",
":",
"'B'",
",",
"'hasChildren'",
":",
"1",
",",
"'tags'",
":",
"'A,K'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'gruppe'",
":",
"'C'",
",",
"'hasChildren'",
":",
"1",
",",
"'tags'",
":",
"'M,K'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit Filter und zwei Gruppierungs Feldern fehlerhaft \"",
")",
"# mit delimiter",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups\"",
":",
"\"tags\"",
",",
"\"delimiter\"",
":",
"\",\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'tags'",
":",
"'A'",
"}",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'tags'",
":",
"'B'",
"}",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'tags'",
":",
"'K'",
"}",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'tags'",
":",
"'M'",
"}",
"}",
"]",
",",
"\"groupby mit delimiter Rückgabe fehlerhaft \" ",
"",
"# groupby mit label testen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups\"",
":",
"\"gruppe\"",
",",
"\"labels\"",
":",
"'{\"dbtests.gruppe\": \"lGruppe\"}'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
",",
"'lGruppe'",
":",
"'A'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppe'",
":",
"'B'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppe'",
":",
"'C'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit label fehlerhaft \"",
")",
"# groupby mit zweifachen label testen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups\"",
":",
"\"gruppe\"",
",",
"\"labels\"",
":",
"'{\"dbtests.gruppe\": [\"lGruppeA\", \"lGruppeB\"]}'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
",",
"'lGruppeA'",
":",
"'A'",
",",
"'lGruppeB'",
":",
"'A'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppeA'",
":",
"'B'",
",",
"'lGruppeB'",
":",
"'B'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppeA'",
":",
"'C'",
",",
"'lGruppeB'",
":",
"'C'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit label fehlerhaft \"",
")",
"# groupby mit fields und label testen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"fields[dbtests]\"",
":",
"\"gruppe\"",
",",
"\"labels\"",
":",
"'{\"dbtests.gruppe\": \"lGruppe\"}'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'lGruppe'",
":",
"'A'",
",",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'lGruppe'",
":",
"'B'",
",",
"'hasChildren'",
":",
"2",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'lGruppe'",
":",
"'C'",
",",
"'hasChildren'",
":",
"2",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit fields und label fehlerhaft\"",
")",
"# groupby mit fields und zweifachen label testen",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"fields[dbtests]\"",
":",
"\"gruppe\"",
",",
"\"labels\"",
":",
"'{\"dbtests.gruppe\": [\"lGruppeA\", \"lGruppeB\"]}'",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
",",
"'lGruppeA'",
":",
"'A'",
",",
"'lGruppeB'",
":",
"'A'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppeA'",
":",
"'B'",
",",
"'lGruppeB'",
":",
"'B'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"2",
",",
"'lGruppeA'",
":",
"'C'",
",",
"'lGruppeB'",
":",
"'C'",
"}",
",",
"'id'",
":",
"None",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"groupby mit fields und label fehlerhaft\"",
")",
"# id als gruppe wird ausgefiltert",
"response",
"=",
"self",
".",
"app",
".",
"get",
"(",
"\"api/dbtests/groupby\"",
",",
"query_string",
"=",
"{",
"\"groups\"",
":",
"\"id\"",
"}",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"status_code",
",",
"200",
",",
"\"Api Status nicht 200\"",
")",
"self",
".",
"assertEqual",
"(",
"response",
".",
"json",
"[",
"\"data\"",
"]",
",",
"[",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"1",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"2",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"3",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"4",
",",
"'type'",
":",
"'dbtests'",
"}",
",",
"{",
"'attributes'",
":",
"{",
"'hasChildren'",
":",
"1",
"}",
",",
"'id'",
":",
"5",
",",
"'type'",
":",
"'dbtests'",
"}",
"]",
",",
"\"id als gruppe wird ausgefiltert\"",
")"
] | [
2063,
4
] | [
2196,
51
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
|
RawData.write_SubGifterDataLog | (self, userDisplayName) | return | Zählt die Anzahl der geschenkten Subs eines Users und schreibt diese in eine Datei | Zählt die Anzahl der geschenkten Subs eines Users und schreibt diese in eine Datei | def write_SubGifterDataLog(self, userDisplayName):
''' Zählt die Anzahl der geschenkten Subs eines Users und schreibt diese in eine Datei'''
# Subgift kommt vom gleichen User
if self.LastSubGifterUserName == userDisplayName:
# Counter erhöhen
self.LastSubGifterCounter += 1
# Neuer Sub-Gifter
else:
# Letzten Subgifter in ein Logfile schreiben
self.write_LastSubgifterToDataLog(self)
# Neuer UserName und Counter zurücksetzen
self.LastSubGifterUserName = userDisplayName
self.LastSubGifterCounter = 1
# Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde
if self.DataFilesPath:
try:
with codecs.open( self.LatestSubsGifterFile, encoding="utf-8", mode="w") as file:
file.write( str( "{0}".format( userDisplayName ) ) + os.linesep + str( "({0} verschenke Subs)".format( TransformLocale_Decimals( int(self.LastSubGifterCounter) ) ) ) )
file.close()
except:
pass
return | [
"def",
"write_SubGifterDataLog",
"(",
"self",
",",
"userDisplayName",
")",
":",
"# Subgift kommt vom gleichen User\r",
"if",
"self",
".",
"LastSubGifterUserName",
"==",
"userDisplayName",
":",
"# Counter erhöhen\r",
"self",
".",
"LastSubGifterCounter",
"+=",
"1",
"# Neuer Sub-Gifter\r",
"else",
":",
"# Letzten Subgifter in ein Logfile schreiben\r",
"self",
".",
"write_LastSubgifterToDataLog",
"(",
"self",
")",
"# Neuer UserName und Counter zurücksetzen\r",
"self",
".",
"LastSubGifterUserName",
"=",
"userDisplayName",
"self",
".",
"LastSubGifterCounter",
"=",
"1",
"# Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde\r",
"if",
"self",
".",
"DataFilesPath",
":",
"try",
":",
"with",
"codecs",
".",
"open",
"(",
"self",
".",
"LatestSubsGifterFile",
",",
"encoding",
"=",
"\"utf-8\"",
",",
"mode",
"=",
"\"w\"",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"str",
"(",
"\"{0}\"",
".",
"format",
"(",
"userDisplayName",
")",
")",
"+",
"os",
".",
"linesep",
"+",
"str",
"(",
"\"({0} verschenke Subs)\"",
".",
"format",
"(",
"TransformLocale_Decimals",
"(",
"int",
"(",
"self",
".",
"LastSubGifterCounter",
")",
")",
")",
")",
")",
"file",
".",
"close",
"(",
")",
"except",
":",
"pass",
"return"
] | [
144,
4
] | [
174,
14
] | null | python | de | ['de', 'de', 'de'] | True | true | null |
ProduitController. | null | permet de récupérer un produit avec son id | permet de récupérer un produit avec son id | Produit findById(int id){
Query query = em.createNamedQuery("Produit.findById");
query.setParameter("id", id);
@SuppressWarnings("unchecked")
List<Produit> results = query.getResultList();
if( !results.isEmpty()) {
return results.get(0);
}
return null;
} | [
"Produit",
"findById",
"(",
"int",
"id",
")",
"{",
"Query",
"query",
"=",
"em",
".",
"createNamedQuery",
"(",
"\"Produit.findById\"",
")",
";",
"query",
".",
"setParameter",
"(",
"\"id\"",
",",
"id",
")",
";",
"@",
"SuppressWarnings",
"(",
"\"unchecked\"",
")",
"List",
"<",
"Produit",
">",
"results",
"=",
"query",
".",
"getResultList",
"(",
")",
";",
"if",
"(",
"!",
"results",
".",
"isEmpty",
"(",
")",
")",
"{",
"return",
"results",
".",
"get",
"(",
"0",
")",
";",
"}",
"return",
"null",
";",
"}"
] | [
47,
1
] | [
56,
2
] | null | java | fr | ['fr', 'fr', 'fr'] | True | true | method_declaration |
|
PeriodCounterFactory. | null | ici c'est un mois flottant (ie une durée), et pas un mois entier | ici c'est un mois flottant (ie une durée), et pas un mois entier | Counter getMonthCounter() {
final Counter monthCounter = createMonthCounterAtDate(currentDayCounter.getStartDate());
addRequestsAndErrorsForRange(monthCounter, Period.MOIS.getRange());
return monthCounter;
} | [
"Counter",
"getMonthCounter",
"(",
")",
"{",
"final",
"Counter",
"monthCounter",
"=",
"createMonthCounterAtDate",
"(",
"currentDayCounter",
".",
"getStartDate",
"(",
")",
")",
";",
"addRequestsAndErrorsForRange",
"(",
"monthCounter",
",",
"Period",
".",
"MOIS",
".",
"getRange",
"(",
")",
")",
";",
"return",
"monthCounter",
";",
"}"
] | [
91,
1
] | [
95,
2
] | null | java | fr | ['fr', 'fr', 'fr'] | True | true | method_declaration |
|
fragmentDivtecInfBulletin. | null | Initialise toutes les textViews contenant les notes | Initialise toutes les textViews contenant les notes | private void initAllTxtView(View view) {
initTxtViewMath(view);
initTxtViewScience(view);
initTxtViewAnglais(view);
initTxtViewEconomie(view);
initTxtViewEduPhy(view);
initTxtViewModules(view);
initTxtViewMoyTot(view);
initTxtViewSociete(view);
initTxtViewLangueEtComm(view);
initTxtViewAtelier(view);
} | [
"private",
"void",
"initAllTxtView",
"(",
"View",
"view",
")",
"{",
"initTxtViewMath",
"(",
"view",
")",
";",
"initTxtViewScience",
"(",
"view",
")",
";",
"initTxtViewAnglais",
"(",
"view",
")",
";",
"initTxtViewEconomie",
"(",
"view",
")",
";",
"initTxtViewEduPhy",
"(",
"view",
")",
";",
"initTxtViewModules",
"(",
"view",
")",
";",
"initTxtViewMoyTot",
"(",
"view",
")",
";",
"initTxtViewSociete",
"(",
"view",
")",
";",
"initTxtViewLangueEtComm",
"(",
"view",
")",
";",
"initTxtViewAtelier",
"(",
"view",
")",
";",
"}"
] | [
250,
4
] | [
261,
5
] | null | java | fr | ['fr', 'fr', 'fr'] | True | true | method_declaration |
|
DicoTools. | null | permet de remplacer des key [VAR0],[VAR1],etc... par des values | permet de remplacer des key [VAR0],[VAR1],etc... par des values | public static String dico_jsp(Lang langue, String key, Object... value) {
String mydico = net.violet.common.StringShop.EMPTY_STRING;
try {
final String hash_key = langue + net.violet.common.StringShop.SLASH + key;
mydico = DicoTools.DICO_CACHE.get(hash_key);
if (mydico == null) {
synchronized (DicoTools.DICO_CACHE) {
mydico = DicoTools.DICO_CACHE.get(hash_key);
if (mydico == null) {
final Dico theDico = Factories.DICO.findByKeyAndLang(key, langue);
if (theDico != null) {
mydico = theDico.getDico_text();
DicoTools.DICO_CACHE.put(hash_key, mydico);
} else {
mydico = "NOLOC [" + key + "]";
//Trop de clé pourrite sur my.nabaztag.com
DicoTools.LOGGER.info("ERREUR lang = " + langue + " key = " + key);
}
}
}
}
} catch (final Throwable t) {
mydico = "NOLOCDB !";
DicoTools.LOGGER.fatal("ERREUR lang = " + langue + " key = " + key);
};
if (value != null) {
for (int i = 0; i < value.length; i++) {
final Object theValue = value[i];
mydico = mydico.replaceAll("\\[VAR" + i + "\\]", theValue != null ? String.valueOf(theValue) : net.violet.common.StringShop.EMPTY_STRING);
}
}
return mydico;
} | [
"public",
"static",
"String",
"dico_jsp",
"(",
"Lang",
"langue",
",",
"String",
"key",
",",
"Object",
"...",
"value",
")",
"{",
"String",
"mydico",
"=",
"net",
".",
"violet",
".",
"common",
".",
"StringShop",
".",
"EMPTY_STRING",
";",
"try",
"{",
"final",
"String",
"hash_key",
"=",
"langue",
"+",
"net",
".",
"violet",
".",
"common",
".",
"StringShop",
".",
"SLASH",
"+",
"key",
";",
"mydico",
"=",
"DicoTools",
".",
"DICO_CACHE",
".",
"get",
"(",
"hash_key",
")",
";",
"if",
"(",
"mydico",
"==",
"null",
")",
"{",
"synchronized",
"(",
"DicoTools",
".",
"DICO_CACHE",
")",
"{",
"mydico",
"=",
"DicoTools",
".",
"DICO_CACHE",
".",
"get",
"(",
"hash_key",
")",
";",
"if",
"(",
"mydico",
"==",
"null",
")",
"{",
"final",
"Dico",
"theDico",
"=",
"Factories",
".",
"DICO",
".",
"findByKeyAndLang",
"(",
"key",
",",
"langue",
")",
";",
"if",
"(",
"theDico",
"!=",
"null",
")",
"{",
"mydico",
"=",
"theDico",
".",
"getDico_text",
"(",
")",
";",
"DicoTools",
".",
"DICO_CACHE",
".",
"put",
"(",
"hash_key",
",",
"mydico",
")",
";",
"}",
"else",
"{",
"mydico",
"=",
"\"NOLOC [\"",
"+",
"key",
"+",
"\"]\"",
";",
"//Trop de clé pourrite sur my.nabaztag.com",
"DicoTools",
".",
"LOGGER",
".",
"info",
"(",
"\"ERREUR lang = \"",
"+",
"langue",
"+",
"\" key = \"",
"+",
"key",
")",
";",
"}",
"}",
"}",
"}",
"}",
"catch",
"(",
"final",
"Throwable",
"t",
")",
"{",
"mydico",
"=",
"\"NOLOCDB !\"",
";",
"DicoTools",
".",
"LOGGER",
".",
"fatal",
"(",
"\"ERREUR lang = \"",
"+",
"langue",
"+",
"\" key = \"",
"+",
"key",
")",
";",
"}",
";",
"if",
"(",
"value",
"!=",
"null",
")",
"{",
"for",
"(",
"int",
"i",
"=",
"0",
";",
"i",
"<",
"value",
".",
"length",
";",
"i",
"++",
")",
"{",
"final",
"Object",
"theValue",
"=",
"value",
"[",
"i",
"]",
";",
"mydico",
"=",
"mydico",
".",
"replaceAll",
"(",
"\"\\\\[VAR\"",
"+",
"i",
"+",
"\"\\\\]\"",
",",
"theValue",
"!=",
"null",
"?",
"String",
".",
"valueOf",
"(",
"theValue",
")",
":",
"net",
".",
"violet",
".",
"common",
".",
"StringShop",
".",
"EMPTY_STRING",
")",
";",
"}",
"}",
"return",
"mydico",
";",
"}"
] | [
55,
1
] | [
92,
2
] | null | java | fr | ['fr', 'fr', 'fr'] | True | true | method_declaration |