Column                           Type           Details
identifier                       string         length 0-89
parameters                       string         length 0-399
return_statement                 string         length 0-982
docstring                        string         length 10-3.04k
docstring_summary                string         length 0-3.04k
function                         string         length 13-25.8k
function_tokens                  sequence
start_point                      sequence
end_point                        sequence
argument_list                    null
language                         string class   3 values
docstring_language               string class   4 values
docstring_language_predictions   string class   4 values
is_langid_reliable               string class   2 values
is_langid_extra_reliable         bool           1 class
type                             string class   9 values
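The rows that follow use this schema. As a minimal sketch of how such rows could be consumed, assuming the data is available through the Hugging Face datasets library; the repository name "user/code-docstrings-de" is a hypothetical placeholder, and the string comparison for is_langid_reliable mirrors the dump above:

from datasets import load_dataset

# Hypothetical repository name; substitute the real one.
ds = load_dataset("user/code-docstrings-de", split="train")

# Keep Python functions whose docstring language was reliably detected as German.
# is_langid_reliable is stored as a string class in this dump, hence the "True" comparison.
german_rows = ds.filter(
    lambda row: row["language"] == "python"
    and row["docstring_language"] == "de"
    and row["is_langid_reliable"] == "True"
)

# Print a few identifier / summary pairs.
for row in german_rows.select(range(3)):
    print(row["identifier"], "-", row["docstring_summary"])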
UNet64_sigmoid_tanh
(input_shape)
return model
gleich wie UNet64_output_expansed, teilweise mit sigmoid und tanh statt relu.
gleich wie UNet64_output_expansed, teilweise mit sigmoid und tanh statt relu.
def UNet64_sigmoid_tanh(input_shape):
    """gleich wie UNet64_output_expansed, teilweise mit sigmoid und tanh statt relu."""
    inputs = Input(shape=input_shape)

    conv01 = Conv2D(10, kernel_size=(3, 3), padding="same")(inputs)  # 10 x 64x64
    conv01 = Activation('tanh')(conv01)
    conv01_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv01)  # 10 x 32x32
    print("0)", conv01_pool.shape, "10 x 32x32")

    conv02 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv01_pool)  # 20 x 32x32
    conv02 = Activation('tanh')(conv02)
    conv02_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv02)  # 20 x 16x16
    print("1)", conv02_pool.shape, "20 x 16x16")

    conv03 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv02_pool)  # 20 x 16x16
    conv03 = Activation('tanh')(conv03)
    conv03_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv03)  # 20 x 8x8
    print("2)", conv03_pool.shape, "20 x 8x8")

    conv04 = Conv2D(20, kernel_size=(3, 3), padding="same")(conv03_pool)  # 20 x 8x8
    conv04 = Activation('relu')(conv04)
    conv04_pool = MaxPooling2D((2, 2), strides=(2, 2))(conv04)  # 20 x 4x4
    print("3)", conv04_pool.shape, "20 x 4x4")

    ### UPSAMPLING:
    up04 = UpSampling2D((2, 2))(conv04_pool)  # 20 x 8x8
    up04 = concatenate([conv04, up04], axis=3)  # 20+20 x 8x8
    print("4)", up04.shape, "40 x 8x8")

    up03 = UpSampling2D((2, 2))(up04)  # 40 x 16x16
    up03 = concatenate([conv03, up03], axis=3)  # 20+40 x 16x16
    print("5)", up03.shape, "60 x 16x16")

    up02 = UpSampling2D((2, 2))(up03)  # 60 x 32x32
    up02 = concatenate([conv02, up02], axis=3)  # 20+60 x 32x32
    print("6)", up02.shape, "80 x 32x32")

    up01 = UpSampling2D((2, 2))(up02)  # 80 x 64x64
    up01 = concatenate([conv01, up01], axis=3)  # 15+80 x 64x64
    print("7)", up01.shape, "95 x 64x64")

    output = Conv2D(1, (3, 3), activation='relu', padding="same")(up01)  # 1 x 64x64
    # output = Activation('tanh')(output)
    print("8)", output.shape, "1 x 64x64")
    output = Flatten()(output)

    model = Model(inputs=inputs, outputs=output)
    model.compile(loss="mean_squared_error", optimizer='nadam')
    # ToDo: try Nesterov Adam optimizer (nadam)
    # http://proceedings.mlr.press/v28/sutskever13.pdf
    return model
[ "def", "UNet64_sigmoid_tanh", "(", "input_shape", ")", ":", "inputs", "=", "Input", "(", "shape", "=", "input_shape", ")", "conv01", "=", "Conv2D", "(", "10", ",", "kernel_size", "=", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", "(", "inputs", ")", "# 10 x 64x64", "conv01", "=", "Activation", "(", "'tanh'", ")", "(", "conv01", ")", "conv01_pool", "=", "MaxPooling2D", "(", "(", "2", ",", "2", ")", ",", "strides", "=", "(", "2", ",", "2", ")", ")", "(", "conv01", ")", "# 10 x 32x32", "print", "(", "\"0)\"", ",", "conv01_pool", ".", "shape", ",", "\"10 x 32x32\"", ")", "conv02", "=", "Conv2D", "(", "20", ",", "kernel_size", "=", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", "(", "conv01_pool", ")", "# 20 x 32x32", "conv02", "=", "Activation", "(", "'tanh'", ")", "(", "conv02", ")", "conv02_pool", "=", "MaxPooling2D", "(", "(", "2", ",", "2", ")", ",", "strides", "=", "(", "2", ",", "2", ")", ")", "(", "conv02", ")", "# 20 x 16x16", "print", "(", "\"1)\"", ",", "conv02_pool", ".", "shape", ",", "\"20 x 16x16\"", ")", "conv03", "=", "Conv2D", "(", "20", ",", "kernel_size", "=", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", "(", "conv02_pool", ")", "# 20 x 16x16", "conv03", "=", "Activation", "(", "'tanh'", ")", "(", "conv03", ")", "conv03_pool", "=", "MaxPooling2D", "(", "(", "2", ",", "2", ")", ",", "strides", "=", "(", "2", ",", "2", ")", ")", "(", "conv03", ")", "# 20 x 8x8", "print", "(", "\"2)\"", ",", "conv03_pool", ".", "shape", ",", "\"20 x 8x8\"", ")", "conv04", "=", "Conv2D", "(", "20", ",", "kernel_size", "=", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", "(", "conv03_pool", ")", "# 20 x 8x8", "conv04", "=", "Activation", "(", "'relu'", ")", "(", "conv04", ")", "conv04_pool", "=", "MaxPooling2D", "(", "(", "2", ",", "2", ")", ",", "strides", "=", "(", "2", ",", "2", ")", ")", "(", "conv04", ")", "# 20 x 4x4", "print", "(", "\"3)\"", ",", "conv04_pool", ".", "shape", ",", "\"20 x 4x4\"", ")", "### UPSAMPLING:", "up04", "=", "UpSampling2D", "(", "(", "2", ",", "2", ")", ")", "(", "conv04_pool", ")", "# 20 x 8x8", "up04", "=", "concatenate", "(", "[", "conv04", ",", "up04", "]", ",", "axis", "=", "3", ")", "# 20+20 x 8x8", "print", "(", "\"4)\"", ",", "up04", ".", "shape", ",", "\"40 x 8x8\"", ")", "up03", "=", "UpSampling2D", "(", "(", "2", ",", "2", ")", ")", "(", "up04", ")", "# 40 x 16x16", "up03", "=", "concatenate", "(", "[", "conv03", ",", "up03", "]", ",", "axis", "=", "3", ")", "# 20+40 x 16x16", "print", "(", "\"5)\"", ",", "up03", ".", "shape", ",", "\"60 x 16x16\"", ")", "up02", "=", "UpSampling2D", "(", "(", "2", ",", "2", ")", ")", "(", "up03", ")", "# 60 x 32x32", "up02", "=", "concatenate", "(", "[", "conv02", ",", "up02", "]", ",", "axis", "=", "3", ")", "# 20+60 x 32x32", "print", "(", "\"6)\"", ",", "up02", ".", "shape", ",", "\"80 x 32x32\"", ")", "up01", "=", "UpSampling2D", "(", "(", "2", ",", "2", ")", ")", "(", "up02", ")", "# 80 x 64x64", "up01", "=", "concatenate", "(", "[", "conv01", ",", "up01", "]", ",", "axis", "=", "3", ")", "# 15+80 x 64x64", "print", "(", "\"7)\"", ",", "up01", ".", "shape", ",", "\"95 x 64x64\"", ")", "output", "=", "Conv2D", "(", "1", ",", "(", "3", ",", "3", ")", ",", "activation", "=", "'relu'", ",", "padding", "=", "\"same\"", ")", "(", "up01", ")", "# 1 x 64x64", "# output = Activation('tanh')(output)", "print", "(", "\"8)\"", ",", "output", ".", "shape", ",", "\"1 x 64x64\"", ")", "output", "=", "Flatten", "(", ")", "(", "output", ")", "model", "=", "Model", 
"(", "inputs", "=", "inputs", ",", "outputs", "=", "output", ")", "model", ".", "compile", "(", "loss", "=", "\"mean_squared_error\"", ",", "optimizer", "=", "'nadam'", ")", "# ToDo: try Nesterov Adam optimizer (nadam)", "# http://proceedings.mlr.press/v28/sutskever13.pdf", "return", "model" ]
[ 294, 0 ]
[ 343, 16 ]
null
python
de
['de', 'de', 'de']
True
true
null
CoolDownHelper.AddUserCooldown
( self, scriptname, command, user, cooldownTime )
return
Cooldown für User setzen
Cooldown für User setzen
def AddUserCooldown( self, scriptname, command, user, cooldownTime ):
    '''Cooldown für User setzen'''
    thisActionName = "AddUserCooldown"
    UserDisplayName = self.Parent.GetDisplayName( user )
    self.Parent.AddUserCooldown( scriptname, command.lower(), user, int(cooldownTime) )
    if ( self.Debug ):
        self.Logger.WriteDebug( command, "Setze Cooldown für User '{0}' ({1}).".format(UserDisplayName, TimePrettyFormatString(int( cooldownTime ) ) ) )
    self.Logger.WriteLog( command + ": Setze Cooldown für User '{0}' ({1}).".format(UserDisplayName, TimePrettyFormatString(int( cooldownTime ) ) ) )
    return
[ "def", "AddUserCooldown", "(", "self", ",", "scriptname", ",", "command", ",", "user", ",", "cooldownTime", ")", ":", "thisActionName", "=", "\"AddUserCooldown\"", "UserDisplayName", "=", "self", ".", "Parent", ".", "GetDisplayName", "(", "user", ")", "self", ".", "Parent", ".", "AddUserCooldown", "(", "scriptname", ",", "command", ".", "lower", "(", ")", ",", "user", ",", "int", "(", "cooldownTime", ")", ")", "if", "(", "self", ".", "Debug", ")", ":", "self", ".", "Logger", ".", "WriteDebug", "(", "command", ",", "\"Setze Cooldown für User '{0}' ({1}).\".", "f", "ormat(", "U", "serDisplayName,", " ", "imePrettyFormatString(", "i", "nt(", " ", "ooldownTime ", " ", " ", " ", "\r", "self", ".", "Logger", ".", "WriteLog", "(", "command", "+", "\": Setze Cooldown für User '{0}' ({1}).\".", "f", "ormat(", "U", "serDisplayName,", " ", "imePrettyFormatString(", "i", "nt(", " ", "ooldownTime ", " ", " ", " ", "\r", "return" ]
[ 74, 4 ]
[ 86, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
XLSwriter.writerow
(self, row)
Eine Zeile schreiben. Row ist eine Liste von Werten.
Eine Zeile schreiben. Row ist eine Liste von Werten.
def writerow(self, row):
    """Eine Zeile schreiben. Row ist eine Liste von Werten."""
    col = 0
    for coldata in row:
        if isinstance(coldata, (datetime.datetime, datetime.date, datetime.time)):
            self.sheet.write(self.rownum, col, coldata, datestyle)
        else:
            if len(unicode(coldata)) > 8192:
                # übergroße Felder RADIKAL verkürzen
                self.sheet.write(self.rownum, col, "%s ..." % unicode(coldata)[:64])
            else:
                self.sheet.write(self.rownum, col, coldata)
        col += 1
    self.rownum += 1
[ "def", "writerow", "(", "self", ",", "row", ")", ":", "col", "=", "0", "for", "coldata", "in", "row", ":", "if", "isinstance", "(", "coldata", ",", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ",", "datetime", ".", "time", ")", ")", ":", "self", ".", "sheet", ".", "write", "(", "self", ".", "rownum", ",", "col", ",", "coldata", ",", "datestyle", ")", "else", ":", "if", "len", "(", "unicode", "(", "coldata", ")", ")", ">", "8192", ":", "# übergroße Felder RADIKAL verkürzen", "self", ".", "sheet", ".", "write", "(", "self", ".", "rownum", ",", "col", ",", "\"%s ...\"", "%", "unicode", "(", "coldata", ")", "[", ":", "64", "]", ")", "else", ":", "self", ".", "sheet", ".", "write", "(", "self", ".", "rownum", ",", "col", ",", "coldata", ")", "col", "+=", "1", "self", ".", "rownum", "+=", "1" ]
[ 29, 4 ]
[ 42, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.id
(self)
return self.__id
str: id der Pipeline.
str: id der Pipeline.
def id(self):
    """str: id der Pipeline."""
    return self.__id
[ "def", "id", "(", "self", ")", ":", "return", "self", ".", "__id" ]
[ 88, 4 ]
[ 90, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
read_arguments
(path, max_args=-1)
Generator um alle CSV direkt in Argument-Objekte zu konvertieren Args: path (str): Pfad zur CSV-Datei max_args (int, default=-1): Maximale Anzahl verschiedener Argumente. Bei -1 werden alle Argumente eingelesen. Yields: Argument
Generator um alle CSV direkt in Argument-Objekte zu konvertieren
def read_arguments(path, max_args=-1):
    """Generator um alle CSV direkt in Argument-Objekte zu konvertieren

    Args:
        path (str): Pfad zur CSV-Datei
        max_args (int, default=-1): Maximale Anzahl verschiedener Argumente.
            Bei -1 werden alle Argumente eingelesen.

    Yields:
        Argument
    """
    for row in read_csv(path, max_rows=max_args):
        yield Argument(row)
[ "def", "read_arguments", "(", "path", ",", "max_args", "=", "-", "1", ")", ":", "for", "row", "in", "read_csv", "(", "path", ",", "max_rows", "=", "max_args", ")", ":", "yield", "Argument", "(", "row", ")" ]
[ 168, 0 ]
[ 181, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBaseWebApp.routeRender
( self, filepath:str="" )
return output
Ein Template in ui oder template_folder rendern. Parameters ---------- filepath : str, optional file und path einer datei aus ui. The default is "". Returns ------- output : str Das gerenderte Template.
Ein Template in ui oder template_folder rendern.
def routeRender( self, filepath:str="" ):
    """Ein Template in ui oder template_folder rendern.

    Parameters
    ----------
    filepath : str, optional
        file und path einer datei aus ui. The default is "".

    Returns
    -------
    output : str
        Das gerenderte Template.
    """
    # .vue as default in views
    if filepath[-4:] == ".vue" or filepath[:6] == "views/":
        if filepath.find(".vue") == -1 and filepath.find(".js") == -1:
            filepath = "{}.vue".format( filepath )
    else:
        # otherwise default is .phtml
        if filepath.find(".phtml") == -1:
            filepath = "{}.phtml".format( filepath )

    uuidstr = str( uuid.uuid1() )
    params = {
        "uuid" : uuidstr,
        "id" : "uuid_" + uuidstr
    }
    # defaults mit requestParams überschreiben
    import connexion
    # connexion verwendet FirstValueURIParser collectionFormat: csv
    # ?letters=a,b,c&letters=d,e,f wird letters = ['a', 'b', 'c']
    params.update( self.parseRequestParams( connexion.request.args.copy() ) )

    # value bestimmen
    value = params.get("value", None )

    try:
        output = render_template(
            filepath,
            params = json.dumps( params ),
            value = value,
            id = params["id"],
            uuid = uuidstr,
            **self._config.get("variables", {} ).toDict()
        )
    except Exception as err:
        # print("[webapp] ERROR: render_template:", err, self._config.get("variables", {} ) )
        output = "<h1>Das Template {} wurde nicht gefunden oder ein parser error [ {} ] liegt vor.</h1>".format( filepath, err )
        self.status_code = 404
        pass

    return output
[ "def", "routeRender", "(", "self", ",", "filepath", ":", "str", "=", "\"\"", ")", ":", "# .vue as default in views", "if", "filepath", "[", "-", "4", ":", "]", "==", "\".vue\"", "or", "filepath", "[", ":", "6", "]", "==", "\"views/\"", ":", "if", "filepath", ".", "find", "(", "\".vue\"", ")", "==", "-", "1", "and", "filepath", ".", "find", "(", "\".js\"", ")", "==", "-", "1", ":", "filepath", "=", "\"{}.vue\"", ".", "format", "(", "filepath", ")", "else", ":", "# otherwise default is .phtml", "if", "filepath", ".", "find", "(", "\".phtml\"", ")", "==", "-", "1", ":", "filepath", "=", "\"{}.phtml\"", ".", "format", "(", "filepath", ")", "uuidstr", "=", "str", "(", "uuid", ".", "uuid1", "(", ")", ")", "params", "=", "{", "\"uuid\"", ":", "uuidstr", ",", "\"id\"", ":", "\"uuid_\"", "+", "uuidstr", "}", "# defaults mit requestParams überschreiben", "import", "connexion", "# connexion verwendet FirstValueURIParser collectionFormat: csv", "# ?letters=a,b,c&letters=d,e,f wird letters = ['a', 'b', 'c']", "params", ".", "update", "(", "self", ".", "parseRequestParams", "(", "connexion", ".", "request", ".", "args", ".", "copy", "(", ")", ")", ")", "# value bestimmen", "value", "=", "params", ".", "get", "(", "\"value\"", ",", "None", ")", "try", ":", "output", "=", "render_template", "(", "filepath", ",", "params", "=", "json", ".", "dumps", "(", "params", ")", ",", "value", "=", "value", ",", "id", "=", "params", "[", "\"id\"", "]", ",", "uuid", "=", "uuidstr", ",", "*", "*", "self", ".", "_config", ".", "get", "(", "\"variables\"", ",", "{", "}", ")", ".", "toDict", "(", ")", ")", "except", "Exception", "as", "err", ":", "# print(\"[webapp] ERROR: render_template:\", err, self._config.get(\"variables\", {} ) )", "output", "=", "\"<h1>Das Template {} wurde nicht gefunden oder ein parser error [ {} ] liegt vor.</h1>\"", ".", "format", "(", "filepath", ",", "err", ")", "self", ".", "status_code", "=", "404", "pass", "return", "output" ]
[ 678, 4 ]
[ 735, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
add_symbol
(values: dict, data: StepData)
Fügt ein Zeichen, Symbol, Wort oder einen Satz zu einem Wert hinzu. Fügt ein Zeichen, Symbol, Wort oder einen Satz zu einem Wert hinzu. Dieses kann sowohl vor als auch hinter dem Wert stehen, der mit `"{_key}"` eingefügt wird. Außerdem kann man so einen Wert kopieren und einem neuen Key zuweisen, wenn man in unter `"pattern"` nur `"{_key}"` einsetzt. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Fügt ein Zeichen, Symbol, Wort oder einen Satz zu einem Wert hinzu.
def add_symbol(values: dict, data: StepData):
    """Fügt ein Zeichen, Symbol, Wort oder einen Satz zu einem Wert hinzu.

    Fügt ein Zeichen, Symbol, Wort oder einen Satz zu einem Wert hinzu. Dieses kann sowohl vor als auch hinter dem
    Wert stehen, der mit `"{_key}"` eingefügt wird. Außerdem kann man so einen Wert kopieren und einem neuen Key
    zuweisen, wenn man in unter `"pattern"` nur `"{_key}"` einsetzt.

    :param values: Werte aus der JSON-Datei
    :param data: Daten aus der API
    """
    for idx, key in data.loop_key(values["keys"], values):
        new_key = get_new_keys(values, idx)
        new_values = data.format(values['pattern'], values)
        data.insert_data(new_key, new_values, values)
[ "def", "add_symbol", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "new_values", "=", "data", ".", "format", "(", "values", "[", "'pattern'", "]", ",", "values", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_values", ",", "values", ")" ]
[ 173, 0 ]
[ 187, 53 ]
null
python
de
['de', 'de', 'de']
True
true
null
PdfGenerator.mathtext
(self, text, area:dict={}, attrs:dict={}, render=None, fontsize=12, dpi=300)
return self.image( output, area, attrs, render, 'svg+xml' )
r"""Rendert Text und TeX Formel nach SVG mit mathtext. https://matplotlib.org/3.1.1/tutorials/text/mathtext.html Die Formel muss mit $ anfangen und enden und der string als raw r"" angegeben werden Beispiel: r"$a/b$" Parameters ---------- text : str Der einzufügende Text area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet fontsize (int, optional): Font size. dpi (int, optional): DPI. Returns ------- element_html: str HTML des erzeugten Elements
r"""Rendert Text und TeX Formel nach SVG mit mathtext.
def mathtext(self, text, area:dict={}, attrs:dict={}, render=None, fontsize=12, dpi=300):
    r"""Rendert Text und TeX Formel nach SVG mit mathtext.

    https://matplotlib.org/3.1.1/tutorials/text/mathtext.html

    Die Formel muss mit $ anfangen und enden und der string als raw r"" angegeben werden

    Beispiel: r"$a/b$"

    Parameters
    ----------
    text : str
        Der einzufügende Text
    area : Area
        {left,top,with,height} die Größe der Ausgabe
    attrs : dict
        zu ändernde id class oder Style Angaben
    render : bool
        sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet
    fontsize (int, optional): Font size.
    dpi (int, optional): DPI.

    Returns
    -------
    element_html: str
        HTML des erzeugten Elements
    """
    fig = plt.figure(figsize=(0.01, 0.01))
    fig.text(0, 0, text, fontsize=fontsize)

    output = io.BytesIO()
    fig.savefig(output, dpi=dpi, transparent=True, format='svg',
                bbox_inches='tight', pad_inches=0.0, frameon=False)
    plt.close(fig)

    return self.image( output, area, attrs, render, 'svg+xml' )
[ "def", "mathtext", "(", "self", ",", "text", ",", "area", ":", "dict", "=", "{", "}", ",", "attrs", ":", "dict", "=", "{", "}", ",", "render", "=", "None", ",", "fontsize", "=", "12", ",", "dpi", "=", "300", ")", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "0.01", ",", "0.01", ")", ")", "fig", ".", "text", "(", "0", ",", "0", ",", "text", ",", "fontsize", "=", "fontsize", ")", "output", "=", "io", ".", "BytesIO", "(", ")", "fig", ".", "savefig", "(", "output", ",", "dpi", "=", "dpi", ",", "transparent", "=", "True", ",", "format", "=", "'svg'", ",", "bbox_inches", "=", "'tight'", ",", "pad_inches", "=", "0.0", ",", "frameon", "=", "False", ")", "plt", ".", "close", "(", "fig", ")", "return", "self", ".", "image", "(", "output", ",", "area", ",", "attrs", ",", "render", ",", "'svg+xml'", ")" ]
[ 1025, 4 ]
[ 1062, 67 ]
null
python
de
['de', 'de', 'de']
True
true
null
handle_getmode
(_daten)
gib den Modus zurueck
gib den Modus zurueck
def handle_getmode(_daten):
    """gib den Modus zurueck"""
    modus = konfiguration.get("modus")
    emit("getmode", {"modus": modus})
[ "def", "handle_getmode", "(", "_daten", ")", ":", "modus", "=", "konfiguration", ".", "get", "(", "\"modus\"", ")", "emit", "(", "\"getmode\"", ",", "{", "\"modus\"", ":", "modus", "}", ")" ]
[ 115, 0 ]
[ 118, 37 ]
null
python
de
['de', 'de', 'de']
True
true
null
RawData.write_LatestSubscriberFile
( self, submonth = "", userDisplayName = "" )
return
Schreibt den letzten Subscriber mit der Anzahl der Monate in ein File
Schreibt den letzten Subscriber mit der Anzahl der Monate in ein File
def write_LatestSubscriberFile( self, submonth = "", userDisplayName = "" ):
    ''' Schreibt den letzten Subscriber mit der Anzahl der Monate in ein File '''
    thisActionName = "write_LatestSubscriberFile"
    # Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde
    if self.DataFilesPath:
        with codecs.open( self.LatestSubscriberFile, encoding="utf-8", mode="w") as file:
            file.write( str( "{0}".format( userDisplayName ) ) + os.linesep + str( "({0}. Monat)".format( TransformLocale_Decimals(int(submonth)) ) ) )
            file.close()
    return
[ "def", "write_LatestSubscriberFile", "(", "self", ",", "submonth", "=", "\"\"", ",", "userDisplayName", "=", "\"\"", ")", ":", "thisActionName", "=", "\"write_LatestSubscriberFile\"", "# Daten nur Schreiben, wenn des Daten-Files-Verzeichnis angegeben wurde\r", "if", "self", ".", "DataFilesPath", ":", "with", "codecs", ".", "open", "(", "self", ".", "LatestSubscriberFile", ",", "encoding", "=", "\"utf-8\"", ",", "mode", "=", "\"w\"", ")", "as", "file", ":", "file", ".", "write", "(", "str", "(", "\"{0}\"", ".", "format", "(", "userDisplayName", ")", ")", "+", "os", ".", "linesep", "+", "str", "(", "\"({0}. Monat)\"", ".", "format", "(", "TransformLocale_Decimals", "(", "int", "(", "submonth", ")", ")", ")", ")", ")", "file", ".", "close", "(", ")", "return" ]
[ 202, 4 ]
[ 213, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
wordcloud
(values: dict, step_data: StepData, prev_paths)
return file
Erstellt ein Wordcloud-Bild. Der Standard-Farbverlauf bei color_func true ist Grau/Schwarz. Die Standard-Farbe ist generell die colormap viridis. Quelle bzgl. Verwendung: https://github.com/amueller/word_cloud :param values: Image Bauplan des zu erstellenden Bildes :param step_data: Daten aus der API :param prev_paths: Alle Image Baupläne und somit auch alle Pfade zu den bisher erstellen Bildern :return: Den Pfad zum erstellten Bild :rtype: str
Erstellt ein Wordcloud-Bild.
def wordcloud(values: dict, step_data: StepData, prev_paths):
    """Erstellt ein Wordcloud-Bild.

    Der Standard-Farbverlauf bei color_func true ist Grau/Schwarz. Die Standard-Farbe ist generell die colormap viridis.
    Quelle bzgl. Verwendung: https://github.com/amueller/word_cloud

    :param values: Image Bauplan des zu erstellenden Bildes
    :param step_data: Daten aus der API
    :param prev_paths: Alle Image Baupläne und somit auch alle Pfade zu den bisher erstellen Bildern
    :return: Den Pfad zum erstellten Bild
    :rtype: str
    """
    wordcloud_parameter = dict(WORDCLOUD_DEFAULT_PARAMETER)
    parameter = values.get("parameter", {})

    for param in parameter:
        if param in wordcloud_parameter:
            if isinstance(wordcloud_parameter[param], bool):
                value = step_data.get_data(parameter[param], {}, bool)
            elif isinstance(wordcloud_parameter[param], numbers.Number):
                value = step_data.get_data(parameter[param], {}, numbers.Number)
            else:
                value = step_data.format(parameter[param])
            wordcloud_parameter[param] = value

    path = resources.get_resource_path(wordcloud_parameter["font_path"])
    wordcloud_parameter["font_path"] = path

    if bool(wordcloud_parameter.get("color_func", False)):
        cfw = list(DEFAULT_COLOR_FUNC_VALUES)
        if "color_func_words" in values:
            cfw_list = step_data.format(values["color_func_words"]).split(" ")
            for idx, c in enumerate(cfw_list):
                cfw[idx] = int(c)
        wordcloud_parameter["color_func"] = get_color_func(*cfw)
    else:
        wordcloud_parameter["color_func"] = None

    if parameter.get("colormap", ""):
        wordcloud_parameter["colormap"] = step_data.format(parameter["colormap"])

    if parameter.get("figure", None) is not None:
        figure = step_data.format(parameter["figure"], {})
        if figure == "circle":
            x0 = wordcloud_parameter["width"]
            y0 = wordcloud_parameter["height"]
            x, y = np.ogrid[:x0, :y0]
            mask = (x - (x0 / 2)) ** 2 + (y - (y0 / 2)) ** 2 > 500 ** 2
            mask = 255 * mask.astype(int)
            wordcloud_parameter["mask"] = mask

    if values.get("use_stopwords", None) is not None:
        try:
            file = resources.get_resource_path("stopwords/stopwords.txt")
            with open(file, "r", encoding='utf-8') as f:
                list_stopwords = f.read().splitlines()
        except IOError:
            list_stopwords = []
        dont_use = step_data.get_data(values.get("stopwords", []), {}, list)
        for each in list_stopwords:
            if each not in dont_use:
                dont_use.append(each)
        wordcloud_parameter["stopwords"] = set(dont_use)
    else:
        dont_use = step_data.get_data(values.get("stopwords", []), {}, list)
        wordcloud_parameter["stopwords"] = set(dont_use)

    if values.get("text", None) is not None:
        wordcloud_image = WordCloud(**wordcloud_parameter).generate(step_data.format(values["text"], {}))
    elif values.get("dict", None) is not None:
        wordcloud_image = WordCloud(**wordcloud_parameter).generate_from_frequencies(
            step_data.get_data(values["dict"], {}, dict))

    image = wordcloud_image.to_image()

    file = resources.new_temp_resource_path(step_data.data["_pipe_id"], "png")
    image.save(file)
    return file
[ "def", "wordcloud", "(", "values", ":", "dict", ",", "step_data", ":", "StepData", ",", "prev_paths", ")", ":", "wordcloud_parameter", "=", "dict", "(", "WORDCLOUD_DEFAULT_PARAMETER", ")", "parameter", "=", "values", ".", "get", "(", "\"parameter\"", ",", "{", "}", ")", "for", "param", "in", "parameter", ":", "if", "param", "in", "wordcloud_parameter", ":", "if", "isinstance", "(", "wordcloud_parameter", "[", "param", "]", ",", "bool", ")", ":", "value", "=", "step_data", ".", "get_data", "(", "parameter", "[", "param", "]", ",", "{", "}", ",", "bool", ")", "elif", "isinstance", "(", "wordcloud_parameter", "[", "param", "]", ",", "numbers", ".", "Number", ")", ":", "value", "=", "step_data", ".", "get_data", "(", "parameter", "[", "param", "]", ",", "{", "}", ",", "numbers", ".", "Number", ")", "else", ":", "value", "=", "step_data", ".", "format", "(", "parameter", "[", "param", "]", ")", "wordcloud_parameter", "[", "param", "]", "=", "value", "path", "=", "resources", ".", "get_resource_path", "(", "wordcloud_parameter", "[", "\"font_path\"", "]", ")", "wordcloud_parameter", "[", "\"font_path\"", "]", "=", "path", "if", "bool", "(", "wordcloud_parameter", ".", "get", "(", "\"color_func\"", ",", "False", ")", ")", ":", "cfw", "=", "list", "(", "DEFAULT_COLOR_FUNC_VALUES", ")", "if", "\"color_func_words\"", "in", "values", ":", "cfw_list", "=", "step_data", ".", "format", "(", "values", "[", "\"color_func_words\"", "]", ")", ".", "split", "(", "\" \"", ")", "for", "idx", ",", "c", "in", "enumerate", "(", "cfw_list", ")", ":", "cfw", "[", "idx", "]", "=", "int", "(", "c", ")", "wordcloud_parameter", "[", "\"color_func\"", "]", "=", "get_color_func", "(", "*", "cfw", ")", "else", ":", "wordcloud_parameter", "[", "\"color_func\"", "]", "=", "None", "if", "parameter", ".", "get", "(", "\"colormap\"", ",", "\"\"", ")", ":", "wordcloud_parameter", "[", "\"colormap\"", "]", "=", "step_data", ".", "format", "(", "parameter", "[", "\"colormap\"", "]", ")", "if", "parameter", ".", "get", "(", "\"figure\"", ",", "None", ")", "is", "not", "None", ":", "figure", "=", "step_data", ".", "format", "(", "parameter", "[", "\"figure\"", "]", ",", "{", "}", ")", "if", "figure", "==", "\"circle\"", ":", "x0", "=", "wordcloud_parameter", "[", "\"width\"", "]", "y0", "=", "wordcloud_parameter", "[", "\"height\"", "]", "x", ",", "y", "=", "np", ".", "ogrid", "[", ":", "x0", ",", ":", "y0", "]", "mask", "=", "(", "x", "-", "(", "x0", "/", "2", ")", ")", "**", "2", "+", "(", "y", "-", "(", "y0", "/", "2", ")", ")", "**", "2", ">", "500", "**", "2", "mask", "=", "255", "*", "mask", ".", "astype", "(", "int", ")", "wordcloud_parameter", "[", "\"mask\"", "]", "=", "mask", "if", "values", ".", "get", "(", "\"use_stopwords\"", ",", "None", ")", "is", "not", "None", ":", "try", ":", "file", "=", "resources", ".", "get_resource_path", "(", "\"stopwords/stopwords.txt\"", ")", "with", "open", "(", "file", ",", "\"r\"", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "list_stopwords", "=", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", "except", "IOError", ":", "list_stopwords", "=", "[", "]", "dont_use", "=", "step_data", ".", "get_data", "(", "values", ".", "get", "(", "\"stopwords\"", ",", "[", "]", ")", ",", "{", "}", ",", "list", ")", "for", "each", "in", "list_stopwords", ":", "if", "each", "not", "in", "dont_use", ":", "dont_use", ".", "append", "(", "each", ")", "wordcloud_parameter", "[", "\"stopwords\"", "]", "=", "set", "(", "dont_use", ")", "else", ":", "dont_use", "=", "step_data", ".", 
"get_data", "(", "values", ".", "get", "(", "\"stopwords\"", ",", "[", "]", ")", ",", "{", "}", ",", "list", ")", "wordcloud_parameter", "[", "\"stopwords\"", "]", "=", "set", "(", "dont_use", ")", "if", "values", ".", "get", "(", "\"text\"", ",", "None", ")", "is", "not", "None", ":", "wordcloud_image", "=", "WordCloud", "(", "*", "*", "wordcloud_parameter", ")", ".", "generate", "(", "step_data", ".", "format", "(", "values", "[", "\"text\"", "]", ",", "{", "}", ")", ")", "elif", "values", ".", "get", "(", "\"dict\"", ",", "None", ")", "is", "not", "None", ":", "wordcloud_image", "=", "WordCloud", "(", "*", "*", "wordcloud_parameter", ")", ".", "generate_from_frequencies", "(", "step_data", ".", "get_data", "(", "values", "[", "\"dict\"", "]", ",", "{", "}", ",", "dict", ")", ")", "image", "=", "wordcloud_image", ".", "to_image", "(", ")", "file", "=", "resources", ".", "new_temp_resource_path", "(", "step_data", ".", "data", "[", "\"_pipe_id\"", "]", ",", "\"png\"", ")", "image", ".", "save", "(", "file", ")", "return", "file" ]
[ 68, 0 ]
[ 152, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_test_diagram_resource_path
(infoprovider_name=None, diagram_name=None)
return path
Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner. :param infoprovider_name: Name des Infoproviders der das Diagram enthält. :param diagram_name: Name des Diagrams innerhalb des Infoproviders.
Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner.
def get_test_diagram_resource_path(infoprovider_name=None, diagram_name=None):
    """Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner.

    :param infoprovider_name: Name des Infoproviders der das Diagram enthält.
    :param diagram_name: Name des Diagrams innerhalb des Infoproviders.
    """
    if infoprovider_name:
        path = infoprovider_name + "/"
    else:
        return get_resource_path(os.path.join(TEMP_LOCATION, "tmp_diagram.png"))
    path += (diagram_name if diagram_name else "") + ".png"
    path = get_resource_path(os.path.join(TEMP_LOCATION, path))
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
[ "def", "get_test_diagram_resource_path", "(", "infoprovider_name", "=", "None", ",", "diagram_name", "=", "None", ")", ":", "if", "infoprovider_name", ":", "path", "=", "infoprovider_name", "+", "\"/\"", "else", ":", "return", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "TEMP_LOCATION", ",", "\"tmp_diagram.png\"", ")", ")", "path", "+=", "(", "diagram_name", "if", "diagram_name", "else", "\"\"", ")", "+", "\".png\"", "path", "=", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "TEMP_LOCATION", ",", "path", ")", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ",", "exist_ok", "=", "True", ")", "return", "path" ]
[ 152, 0 ]
[ 165, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.progress
(self)
return self.__current_step + 1, self.__steps_max + 1
Fortschritt der Pipeline. :return: Anzahl der schon ausgeführten Schritte, Anzahl aller Schritte. :rtype: int, int
Fortschritt der Pipeline.
def progress(self):
    """Fortschritt der Pipeline.

    :return: Anzahl der schon ausgeführten Schritte, Anzahl aller Schritte.
    :rtype: int, int
    """
    return self.__current_step + 1, self.__steps_max + 1
[ "def", "progress", "(", "self", ")", ":", "return", "self", ".", "__current_step", "+", "1", ",", "self", ".", "__steps_max", "+", "1" ]
[ 92, 4 ]
[ 98, 60 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_audio_path
(path: str)
return get_resource_path(os.path.join(AUDIO_LOCATION, path))
Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource. Erstellt den Pfad aus `RESOURCES_LOCATION`, `AUDIO_LOCATION` und dem übergebenen Pfad. :param path: Pfad zur Ressource, relativ zum `resources/audio`-Ordner. :return: Absoluter Pfad zur übergebenen Ressource.
Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource.
def get_audio_path(path: str):
    """Erstellt einen absoluten Pfad zu der übergebenen Audio-Ressource.

    Erstellt den Pfad aus `RESOURCES_LOCATION`, `AUDIO_LOCATION` und dem übergebenen Pfad.

    :param path: Pfad zur Ressource, relativ zum `resources/audio`-Ordner.
    :return: Absoluter Pfad zur übergebenen Ressource.
    """
    return get_resource_path(os.path.join(AUDIO_LOCATION, path))
[ "def", "get_audio_path", "(", "path", ":", "str", ")", ":", "return", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "AUDIO_LOCATION", ",", "path", ")", ")" ]
[ 131, 0 ]
[ 139, 64 ]
null
python
de
['de', 'de', 'de']
True
true
null
plotImage.axLimit
(self, ax, limits:dict={})
Achsen Limits ändern wird border angegeben zusätzlichen Rand hinzufügen. Parameters ---------- ax: axis das axis element auf das die Änderungen angewandt werden sollen limits: dict Elemente: X1, X2, Y1, Y2, border
Achsen Limits ändern wird border angegeben zusätzlichen Rand hinzufügen. Parameters ---------- ax: axis das axis element auf das die Änderungen angewandt werden sollen limits: dict Elemente: X1, X2, Y1, Y2, border
def axLimit(self, ax, limits:dict={}):
    """Achsen Limits ändern

    wird border angegeben zusätzlichen Rand hinzufügen.

    Parameters
    ----------
    ax: axis
        das axis element auf das die Änderungen angewandt werden sollen
    limits: dict
        Elemente: X1, X2, Y1, Y2, border
    """
    border = 0
    if "border" in limits:
        border = limits["border"]

    if "X1" in limits and "X2" in limits:
        px_min = self.mm2dots_X( limits["X1"] - border )
        px_max = self.mm2dots_X( limits["X2"] + border )
        # neues limit setzen
        ax.set_xlim( (px_min, px_max) )

    if "Y1" in limits and "Y2" in limits :
        py_min = self.mm2dots_Y( limits["Y1"] - border)
        py_max = self.mm2dots_Y( limits["Y2"] + border)
        # neues limit setzen
        ax.set_ylim( (py_min, py_max) )
[ "def", "axLimit", "(", "self", ",", "ax", ",", "limits", ":", "dict", "=", "{", "}", ")", ":", "border", "=", "0", "if", "\"border\"", "in", "limits", ":", "border", "=", "limits", "[", "\"border\"", "]", "if", "\"X1\"", "in", "limits", "and", "\"X2\"", "in", "limits", ":", "px_min", "=", "self", ".", "mm2dots_X", "(", "limits", "[", "\"X1\"", "]", "-", "border", ")", "px_max", "=", "self", ".", "mm2dots_X", "(", "limits", "[", "\"X2\"", "]", "+", "border", ")", "# neues limit setzen", "ax", ".", "set_xlim", "(", "(", "px_min", ",", "px_max", ")", ")", "if", "\"Y1\"", "in", "limits", "and", "\"Y2\"", "in", "limits", ":", "py_min", "=", "self", ".", "mm2dots_Y", "(", "limits", "[", "\"Y1\"", "]", "-", "border", ")", "py_max", "=", "self", ".", "mm2dots_Y", "(", "limits", "[", "\"Y2\"", "]", "+", "border", ")", "# neues limit setzen", "ax", ".", "set_ylim", "(", "(", "py_min", ",", "py_max", ")", ")" ]
[ 129, 4 ]
[ 153, 43 ]
null
python
de
['de', 'de', 'de']
True
true
null
DicomImage.getLine
( self, field=None )
return line
holt eine pixel Reihe
holt eine pixel Reihe
def getLine( self, field=None ):
    """ holt eine pixel Reihe """
    line = None
    if "x" in field:
        line = self.array[:, self.mm2dots_X(field["x"]) ]
    elif "y" in field:
        line = self.array[ self.mm2dots_Y(field["y"]) ]
    return line
[ "def", "getLine", "(", "self", ",", "field", "=", "None", ")", ":", "line", "=", "None", "if", "\"x\"", "in", "field", ":", "line", "=", "self", ".", "array", "[", ":", ",", "self", ".", "mm2dots_X", "(", "field", "[", "\"x\"", "]", ")", "]", "elif", "\"y\"", "in", "field", ":", "line", "=", "self", ".", "array", "[", "self", ".", "mm2dots_Y", "(", "field", "[", "\"y\"", "]", ")", "]", "return", "line" ]
[ 643, 4 ]
[ 651, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_wl.plotMergeImage
(self, imageSize={} )
return self.getPlot()
Alle Achsen Bilder ausgeben
Alle Achsen Bilder ausgeben
def plotMergeImage(self, imageSize={} ):
    """Alle Achsen Bilder ausgeben """
    # plotbereiche festlegen
    fig, ax = self.initPlot( imageSize, nrows=1, ncols=3)

    for mpl_axis, wl_axis, title in zip_longest( ax.flatten(), ["G", "C", "T"], ["Gantry", "Kollimator", "Tisch"] ):
        try:
            self._axisImage( mpl_axis, wl_axis, title )
        except:
            #print( "plotMergeImage", mpl_axis, wl_axis, title )
            pass

    plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)

    return self.getPlot()
[ "def", "plotMergeImage", "(", "self", ",", "imageSize", "=", "{", "}", ")", ":", "# plotbereiche festlegen", "fig", ",", "ax", "=", "self", ".", "initPlot", "(", "imageSize", ",", "nrows", "=", "1", ",", "ncols", "=", "3", ")", "for", "mpl_axis", ",", "wl_axis", ",", "title", "in", "zip_longest", "(", "ax", ".", "flatten", "(", ")", ",", "[", "\"G\"", ",", "\"C\"", ",", "\"T\"", "]", ",", "[", "\"Gantry\"", ",", "\"Kollimator\"", ",", "\"Tisch\"", "]", ")", ":", "try", ":", "self", ".", "_axisImage", "(", "mpl_axis", ",", "wl_axis", ",", "title", ")", "except", ":", "#print( \"plotMergeImage\", mpl_axis, wl_axis, title )", "pass", "plt", ".", "tight_layout", "(", "pad", "=", "0.4", ",", "w_pad", "=", "0.5", ",", "h_pad", "=", "1.0", ")", "return", "self", ".", "getPlot", "(", ")" ]
[ 645, 4 ]
[ 662, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispDicom.retrieve
( self, params={} )
return instances, signals
Holt DICOM Daten mit threading und event Benachrichtigung. Ruft _retrieve mit den Parametern auf suchen nach Received unexpected C-MOVE service message in pynetdicom Association _serve_request aufgerufen von _run_reactor Parameters ---------- params : dict, optional DESCRIPTION. The default is {}. Returns ------- instances : list gefundene Dataset Instances. signals: list
Holt DICOM Daten mit threading und event Benachrichtigung.
def retrieve( self, params={} ):
    """Holt DICOM Daten mit threading und event Benachrichtigung.

    Ruft _retrieve mit den Parametern auf

    suchen nach Received unexpected C-MOVE service message
    in pynetdicom Association _serve_request aufgerufen von _run_reactor

    Parameters
    ----------
    params : dict, optional
        DESCRIPTION. The default is {}.

    Returns
    -------
    instances : list
        gefundene Dataset Instances.
    signals: list
    """
    instances = []
    signals = []

    result_available = threading.Event()
    mq = queue.Queue()

    def _C_STORE( signal ):
        signals.append(signal)
        if signal["_is_cancelled"] == True:
            # Vorgang abbrechen
            result_available.set()
        elif signal["_is_cancelled"] == False:
            if "dataset" in signal:
                # Ergebnis setzen und abbrechen
                instances.append( signal["dataset"] )
                result_available.set()

    def _RELEASED( signal ):
        signals.append( signal )
        result_available.set()

    def _REJECTED( signal ):
        signals.append( signal )
        result_available.set()

    def _ABORTED( signal ):
        signals.append( signal )
        result_available.set()

    signal( 'dicom.EVT_C_STORE' ).connect( _C_STORE )
    signal( 'dicom.EVT_REJECTED' ).connect( _REJECTED )
    signal( 'dicom.EVT_RELEASED' ).connect( _RELEASED )
    signal( 'dicom.EVT_ABORTED' ).connect( _ABORTED )

    # Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen
    thread = threading.Thread( target=lambda q, args: q.put( self._retrieve( **args ) ), args=( mq, params ) )
    thread.start()

    # nach max. 10 sec den Vorgang abbrechen
    while not result_available.wait( timeout=10 ):
        result_available.set()

    return instances, signals
[ "def", "retrieve", "(", "self", ",", "params", "=", "{", "}", ")", ":", "instances", "=", "[", "]", "signals", "=", "[", "]", "result_available", "=", "threading", ".", "Event", "(", ")", "mq", "=", "queue", ".", "Queue", "(", ")", "def", "_C_STORE", "(", "signal", ")", ":", "signals", ".", "append", "(", "signal", ")", "if", "signal", "[", "\"_is_cancelled\"", "]", "==", "True", ":", "# Vorgang abbrechen", "result_available", ".", "set", "(", ")", "elif", "signal", "[", "\"_is_cancelled\"", "]", "==", "False", ":", "if", "\"dataset\"", "in", "signal", ":", "# Ergebnis setzen und abbrechen", "instances", ".", "append", "(", "signal", "[", "\"dataset\"", "]", ")", "result_available", ".", "set", "(", ")", "def", "_RELEASED", "(", "signal", ")", ":", "signals", ".", "append", "(", "signal", ")", "result_available", ".", "set", "(", ")", "def", "_REJECTED", "(", "signal", ")", ":", "signals", ".", "append", "(", "signal", ")", "result_available", ".", "set", "(", ")", "def", "_ABORTED", "(", "signal", ")", ":", "signals", ".", "append", "(", "signal", ")", "result_available", ".", "set", "(", ")", "signal", "(", "'dicom.EVT_C_STORE'", ")", ".", "connect", "(", "_C_STORE", ")", "signal", "(", "'dicom.EVT_REJECTED'", ")", ".", "connect", "(", "_REJECTED", ")", "signal", "(", "'dicom.EVT_RELEASED'", ")", ".", "connect", "(", "_RELEASED", ")", "signal", "(", "'dicom.EVT_ABORTED'", ")", ".", "connect", "(", "_ABORTED", ")", "# Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "lambda", "q", ",", "args", ":", "q", ".", "put", "(", "self", ".", "_retrieve", "(", "*", "*", "args", ")", ")", ",", "args", "=", "(", "mq", ",", "params", ")", ")", "thread", ".", "start", "(", ")", "# nach max. 10 sec den Vorgang abbrechen", "while", "not", "result_available", ".", "wait", "(", "timeout", "=", "10", ")", ":", "result_available", ".", "set", "(", ")", "return", "instances", ",", "signals" ]
[ 1125, 4 ]
[ 1190, 33 ]
null
python
de
['de', 'de', 'de']
True
true
null
DatumsNotationSplitted.splitter
(self)
return self.__splitter
Das Trennzeichen der Notation
Das Trennzeichen der Notation
def splitter(self) -> str:
    """Das Trennzeichen der Notation"""
    return self.__splitter
[ "def", "splitter", "(", "self", ")", "->", "str", ":", "return", "self", ".", "__splitter" ]
[ 29, 4 ]
[ 31, 30 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_vmat.getResults
( self )
return data
Holt die Ergebnisse der Auswertung. und füllt die segmente mit jeweiligen Daten aus metadata Returns ------- dict - open - vmat - header - SID - deviation - maximum_deviation - segments : list of dict - segment - center - r_corr - r_dev - draw_height - draw_width - draw_corner - segment_obj
Holt die Ergebnisse der Auswertung. und füllt die segmente mit jeweiligen Daten aus metadata
def getResults( self ):
    """Holt die Ergebnisse der Auswertung.

    und füllt die segmente mit jeweiligen Daten aus metadata

    Returns
    -------
    dict
        - open
        - vmat
        - header
        - SID
        - deviation
        - maximum_deviation
        - segments : list of dict
            - segment
            - center
            - r_corr
            - r_dev
            - draw_height
            - draw_width
            - draw_corner
            - segment_obj
    """
    if not self.analysed:
        return {}

    dmlc_prof, open_prof = self._median_profiles((self.dmlc_image, self.open_image))

    segments = []
    lfd = 0
    for segment in self.segments:
        #print( segment )
        lfd += 1
        # r_corr = Return the ratio of the mean pixel values of DMLC/OPEN images.
        # r_dev = The reading deviation (R_dev) from the average readings of all the segments
        # passed = self.r_dev < self._tolerance * 100
        _segment = {
            "segment": lfd,
            "center" : self.SEGMENT_X_POSITIONS_MM[lfd-1],
            "r_corr" : segment.r_corr,
            "r_dev" : segment.r_dev,
            "draw_height": segment.height,
            "draw_width": segment.width,
            "draw_corner": segment.bl_corner,
            "segment_obj": segment
            # "passed" : segment.passed
        }
        _segment.update( self.metadata["segments"][ str(lfd) ] )
        segments.append( _segment )

    data = {
        "open" : open_prof,
        "vmat": dmlc_prof,
        "header" : self._result_header,
        "SID" : self.open_image.sid,
        "deviation": self.avg_abs_r_deviation,
        "maximum_deviation": self.max_r_deviation,
        "segments": segments
    }
    return data
[ "def", "getResults", "(", "self", ")", ":", "if", "not", "self", ".", "analysed", ":", "return", "{", "}", "dmlc_prof", ",", "open_prof", "=", "self", ".", "_median_profiles", "(", "(", "self", ".", "dmlc_image", ",", "self", ".", "open_image", ")", ")", "segments", "=", "[", "]", "lfd", "=", "0", "for", "segment", "in", "self", ".", "segments", ":", "#print( segment )", "lfd", "+=", "1", "# r_corr = Return the ratio of the mean pixel values of DMLC/OPEN images.", "# r_dev = The reading deviation (R_dev) from the average readings of all the segments", "# passed = self.r_dev < self._tolerance * 100", "_segment", "=", "{", "\"segment\"", ":", "lfd", ",", "\"center\"", ":", "self", ".", "SEGMENT_X_POSITIONS_MM", "[", "lfd", "-", "1", "]", ",", "\"r_corr\"", ":", "segment", ".", "r_corr", ",", "\"r_dev\"", ":", "segment", ".", "r_dev", ",", "\"draw_height\"", ":", "segment", ".", "height", ",", "\"draw_width\"", ":", "segment", ".", "width", ",", "\"draw_corner\"", ":", "segment", ".", "bl_corner", ",", "\"segment_obj\"", ":", "segment", "# \"passed\" : segment.passed", "}", "_segment", ".", "update", "(", "self", ".", "metadata", "[", "\"segments\"", "]", "[", "str", "(", "lfd", ")", "]", ")", "segments", ".", "append", "(", "_segment", ")", "data", "=", "{", "\"open\"", ":", "open_prof", ",", "\"vmat\"", ":", "dmlc_prof", ",", "\"header\"", ":", "self", ".", "_result_header", ",", "\"SID\"", ":", "self", ".", "open_image", ".", "sid", ",", "\"deviation\"", ":", "self", ".", "avg_abs_r_deviation", ",", "\"maximum_deviation\"", ":", "self", ".", "max_r_deviation", ",", "\"segments\"", ":", "segments", "}", "return", "data" ]
[ 132, 4 ]
[ 196, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
UhrzeitNotation.convert
(self, *args, **kwargs)
Konvertiert eine Angabe einer Uhrzeit in einer gewissen Notation in eine Uhrzeit
Konvertiert eine Angabe einer Uhrzeit in einer gewissen Notation in eine Uhrzeit
def convert(self, *args, **kwargs) -> "Uhrzeit":
    """Konvertiert eine Angabe einer Uhrzeit in einer gewissen Notation in eine Uhrzeit"""
    pass
[ "def", "convert", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "->", "\"Uhrzeit\"", ":", "pass" ]
[ 12, 4 ]
[ 14, 12 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispTest.test_webapp_dummy_test
( self )
Api aufruf durchführen GET /api/dummy/
Api aufruf durchführen GET /api/dummy/
def test_webapp_dummy_test( self ):
    ''' Api aufruf durchführen GET /api/dummy/ '''
    # --- dummy Klasse abfragen
    # dummy api_list abfragen
    response = self.app.get( "api/dummy" )
    self.assertEqual(response.status_code, 200, "Api Status nicht 200")
    self.assertEqual(
        response.json["data"],
        [{
            'attributes': {'function': 'api_list', 'kwargs': {'_ispcp': {}}},
            'id': '12',
            'links': {'self': 'http://localhost/api/dummy/12/'},
            'type': 'dummy'
        }],
        "falsche api_list Rückgabe"
    )

    # dummy api_get abfragen wird dummyId mitgegeben
    response = self.app.get( "api/dummy/12" )
    self.assertEqual(response.status_code, 200, "Api Status nicht 200")
    #print(response.json["data"][0])
    self.assertDictEqual(
        response.json["data"],
        {
            'attributes': {'dummyId': '12'},
            'id': 12,
            'links': {'self': 'http://localhost/api/dummy/12/'},
            'type': 'dummy'
        },
        "falsche id Rückgabe"
    )
    #print( response.json )

    # ohne Pflichfeld Angabe test gibt es nicht
    response = self.app.get( "api/dummy/test" )
    # print("api/dummy/test", response.json )
    self.assertEqual(response.status_code, 400, "Api Status nicht 400")
    self.assertDictEqual(
        response.json,
        { "message": { "zahl": "Eine Zahl" } },
        "nicht abgelehnt ohne Pflichfeld Angabe"
    )

    # ohne text (hat default) mit test (nicht vorhanden)
    # /api/system/test?zahl=012&bool=True&test=1&_ispcp={"name":"B"}
    response = self.app.get( "api/dummy/test", query_string={ "zahl":"012", "bool":True, "test":1, "_ispcp": json.dumps( {"name":"B"} ) } )
    # kommen auch zusätzliche Angaben und werden unnötige ausgefiltert
    self.assertEqual(response.status_code, 200, "Api Status nicht 200")
    self.assertDictEqual(
        response.json["data"],
        { "_ispcp": {"name": "B"}, "bool": True, "text": "typenlos", "zahl": 12.0 },
        "Parameter Auswertung falsch"
    )

    response = self.app.get( "api/dummy/undefined" )  # einen undefined holen
    self.assertEqual(response.status_code, 200, "Api Status nicht 200")
    self.assertEqual(
        response.json["data"],
        [{'attributes': {}, 'id': 'undefined', 'type': 'dummy'}],
        "undefined fehlerhaft"
    )

    # Dummy ohne funktion gibt undefined Datensatz
    response = self.app.get( "api/dummy/gibtsnicht" )
    self.assertEqual(response.status_code, 200, "Api Status nicht 200")
    self.assertEqual(
        response.json["data"],
        { 'attributes': {}, 'id': 'undefined', 'type': 'dummy' },
        "Dummy ohne funktion gibt keine undefined datensatz "
    )

    #
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 1 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual( response.json["data"], [], "Test leere Liste" )

    response = self.app.get( "api/dummy/test", query_string={ "zahl": 2 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual( response.json["data"], [{"a": 1, "b": 2}], "Test Liste mit einem Element" )

    # fehler bei der Umwandlung data bleibt leer
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 3 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual( response.json["data"], [], "fehler bei der Umwandlung data bleibt leer" )

    response = self.app.get( "api/dummy/test", query_string={ "zahl": 4 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    #print( response.json )

    response = self.app.get( "api/dummy/test", query_string={ "zahl": 5, "_ispcp" : "{test}"} )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual(
        response.json['App-Error'],
        [{'message': 'swagger Parameter Json Error', 'info': '_ispcp={test}'}],
        "Parameter Json Error"
    )

    # _int_query selbst aufrufen
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 6 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual( response.json['data'], [{'A': 1}, {'B': 2}], "Parameter Json Error" )

    # _int_group_query selbst aufrufen
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 7 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual(
        response.json['App-Error'],
        [],
        # [{'message': 'Fehler bei _int_group', 'info': "'dummyQuery' object has no attribute 'group_by'"}],
        "_int_group_query selbst aufrufen"
    )

    # access_cls selbst aufrufen
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 8 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual(
        response.json['data'],
        [{'nicht da': ''}, {'sqlalchemy.BigInteger': ''}],
        "access_cls selbst aufrufen"
    )

    # iso2date aufrufen
    response = self.app.get( "api/dummy/test", query_string={ "zahl": 9 } )
    self.assertEqual(response.status_code, 200, "Status nicht 200")
    self.assertEqual(
        response.json['data'],
        [
            {'test=None': None},
            {'20180415=2018-04-15': '2018-04-15'},
            {'2018-04-15=2018-04-15': '2018-04-15'},
            {'2018-04-15 14:36:25=2018-04-15': '2018-04-15'},
            {'2018-04-15=18-04-15 00:00:00': '2018-04-15 00:00:00'},
            {'2018-04-15 14:36:25=2018-04-15 14:36:25': '2018-04-15 14:36:25'},
            {'20180415 14:36:25=2018-04-15 14:36:25': '2018-04-15 14:36:25'},
            {'20180415 14:36=2018-04-15 14:36:00': '2018-04-15 14:36:00'},
            {'201A0415 14:36:25=None': None},
            {'201A0415 14:36=None': None},
            {'201A0415=None': None}
        ],
        "iso2date aufrufen"
    )

    # versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen
    response = self.app.get( "api/dummy/norpc" )
    self.assertEqual(response.status_code, 400, "Status nicht 400")
    self.assertEqual( response.json, {}, "versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen" )
[ "def", "test_webapp_dummy_test", "(", "self", ")", ":", "# --- dummy Klasse abfragen", "# dummy api_list abfragen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "[", "{", "'attributes'", ":", "{", "'function'", ":", "'api_list'", ",", "'kwargs'", ":", "{", "'_ispcp'", ":", "{", "}", "}", "}", ",", "'id'", ":", "'12'", ",", "'links'", ":", "{", "'self'", ":", "'http://localhost/api/dummy/12/'", "}", ",", "'type'", ":", "'dummy'", "}", "]", ",", "\"falsche api_list Rückgabe\"", ")", "# dummy api_get abfragen wird dummyId mitgegeben", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/12\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "#print(response.json[\"data\"][0])", "self", ".", "assertDictEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "{", "'attributes'", ":", "{", "'dummyId'", ":", "'12'", "}", ",", "'id'", ":", "12", ",", "'links'", ":", "{", "'self'", ":", "'http://localhost/api/dummy/12/'", "}", ",", "'type'", ":", "'dummy'", "}", ",", "\"falsche id Rückgabe\"", ")", "#print( response.json )", "# ohne Pflichfeld Angabe test gibt es nicht", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ")", "# print(\"api/dummy/test\", response.json )", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "400", ",", "\"Api Status nicht 400\"", ")", "self", ".", "assertDictEqual", "(", "response", ".", "json", ",", "{", "\"message\"", ":", "{", "\"zahl\"", ":", "\"Eine Zahl\"", "}", "}", ",", "\"nicht abgelehnt ohne Pflichfeld Angabe\"", ")", "# ohne text (hat default) mit test (nicht vorhanden)", "# /api/system/test?zahl=012&bool=True&test=1&_ispcp={\"name\":\"B\"}", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "\"012\"", ",", "\"bool\"", ":", "True", ",", "\"test\"", ":", "1", ",", "\"_ispcp\"", ":", "json", ".", "dumps", "(", "{", "\"name\"", ":", "\"B\"", "}", ")", "}", ")", "# kommen auch zusätzliche Angaben und werden unnötige ausgefiltert", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertDictEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "{", "\"_ispcp\"", ":", "{", "\"name\"", ":", "\"B\"", "}", ",", "\"bool\"", ":", "True", ",", "\"text\"", ":", "\"typenlos\"", ",", "\"zahl\"", ":", "12.0", "}", ",", "\"Parameter Auswertung falsch\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/undefined\"", ")", "# einen undefined holen", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "[", "{", "'attributes'", ":", "{", "}", ",", "'id'", ":", "'undefined'", ",", "'type'", ":", "'dummy'", "}", "]", ",", "\"undefined fehlerhaft\"", ")", "# Dummy ohne funktion gibt undefined Datensatz ", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/gibtsnicht\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", 
"{", "'attributes'", ":", "{", "}", ",", "'id'", ":", "'undefined'", ",", "'type'", ":", "'dummy'", "}", ",", "\"Dummy ohne funktion gibt keine undefined datensatz \"", ")", "# ", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "1", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "[", "]", ",", "\"Test leere Liste\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "2", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "[", "{", "\"a\"", ":", "1", ",", "\"b\"", ":", "2", "}", "]", ",", "\"Test Liste mit einem Element\"", ")", "# fehler bei der Umwandlung data bleibt leer", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "3", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "[", "]", ",", "\"fehler bei der Umwandlung data bleibt leer\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "4", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "#print( response.json )", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "5", ",", "\"_ispcp\"", ":", "\"{test}\"", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "'App-Error'", "]", ",", "[", "{", "'message'", ":", "'swagger Parameter Json Error'", ",", "'info'", ":", "'_ispcp={test}'", "}", "]", ",", "\"Parameter Json Error\"", ")", "# _int_query selbst aufrufen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "6", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "'data'", "]", ",", "[", "{", "'A'", ":", "1", "}", ",", "{", "'B'", ":", "2", "}", "]", ",", "\"Parameter Json Error\"", ")", "# _int_group_query selbst aufrufen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "7", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "'App-Error'", "]", ",", "[", "]", ",", "# [{'message': 'Fehler bei _int_group', 'info': \"'dummyQuery' object has no attribute 'group_by'\"}],", "\"_int_group_query selbst aufrufen\"", ")", "# access_cls selbst aufrufen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "8", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", 
"self", ".", "assertEqual", "(", "response", ".", "json", "[", "'data'", "]", ",", "[", "{", "'nicht da'", ":", "''", "}", ",", "{", "'sqlalchemy.BigInteger'", ":", "''", "}", "]", ",", "\"access_cls selbst aufrufen\"", ")", "# iso2date aufrufen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "9", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", "[", "'data'", "]", ",", "[", "{", "'test=None'", ":", "None", "}", ",", "{", "'20180415=2018-04-15'", ":", "'2018-04-15'", "}", ",", "{", "'2018-04-15=2018-04-15'", ":", "'2018-04-15'", "}", ",", "{", "'2018-04-15 14:36:25=2018-04-15'", ":", "'2018-04-15'", "}", ",", "{", "'2018-04-15=18-04-15 00:00:00'", ":", "'2018-04-15 00:00:00'", "}", ",", "{", "'2018-04-15 14:36:25=2018-04-15 14:36:25'", ":", "'2018-04-15 14:36:25'", "}", ",", "{", "'20180415 14:36:25=2018-04-15 14:36:25'", ":", "'2018-04-15 14:36:25'", "}", ",", "{", "'20180415 14:36=2018-04-15 14:36:00'", ":", "'2018-04-15 14:36:00'", "}", ",", "{", "'201A0415 14:36:25=None'", ":", "None", "}", ",", "{", "'201A0415 14:36=None'", ":", "None", "}", ",", "{", "'201A0415=None'", ":", "None", "}", "]", ",", "\"iso2date aufrufen\"", ")", "# versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dummy/norpc\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "400", ",", "\"Status nicht 400\"", ")", "self", ".", "assertEqual", "(", "response", ".", "json", ",", "{", "}", ",", "\"versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen\"", ")" ]
[ 1584, 4 ]
[ 1775, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispDicom.archive_hasSOPInstanceUID
(self, SOPInstanceUID)
return os.path.isfile( filename ), filename
Prüft ob eine SOPInstanceUID schon im File Archiv vorhanden ist Parameters ---------- SOPInstanceUID : TYPE Eine SOPInstanceUID. Returns ------- exists : bool Datei vorhanden oder nicht. filename : str Der geprüfte Dateiname
Prüft ob eine SOPInstanceUID schon im File Archiv vorhanden ist
def archive_hasSOPInstanceUID(self, SOPInstanceUID): """Prüft ob eine SOPInstanceUID schon im File Archiv vorhanden ist Parameters ---------- SOPInstanceUID : TYPE Eine SOPInstanceUID. Returns ------- exists : bool Datei vorhanden oder nicht. filename : str Der geprüfte Dateiname """ filename = osp.join( self.dicomPath, self.subPath, SOPInstanceUID + ".dcm" ) return os.path.isfile( filename ), filename
[ "def", "archive_hasSOPInstanceUID", "(", "self", ",", "SOPInstanceUID", ")", ":", "filename", "=", "osp", ".", "join", "(", "self", ".", "dicomPath", ",", "self", ".", "subPath", ",", "SOPInstanceUID", "+", "\".dcm\"", ")", "return", "os", ".", "path", ".", "isfile", "(", "filename", ")", ",", "filename" ]
[ 1054, 4 ]
[ 1072, 51 ]
null
python
de
['de', 'de', 'de']
True
true
null
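A minimal usage sketch for the archive check above; "dicom" stands for an already initialised ispDicom instance (with dicomPath and subPath configured) and the SOPInstanceUID is a made-up placeholder:

    # hypothetical instance and UID, for illustration only
    exists, filename = dicom.archive_hasSOPInstanceUID("1.2.840.10008.999.1")
    if not exists:
        print("not archived yet, would be stored as", filename)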
ToolboxAgenda.set_tags_for_textbox
(textbox)
Meta-Informationen für Textbox einstellen
Meta-Informationen für Textbox einstellen
def set_tags_for_textbox(textbox): ''' Meta-Informationen für Textbox einstellen ''' textbox.Tags.Add(TOOLBOX_AGENDA_TEXTBOX, "1") textbox.Tags.Add(bkt.contextdialogs.BKT_CONTEXTDIALOG_TAGKEY, TOOLBOX_AGENDA_POPUP)
[ "def", "set_tags_for_textbox", "(", "textbox", ")", ":", "textbox", ".", "Tags", ".", "Add", "(", "TOOLBOX_AGENDA_TEXTBOX", ",", "\"1\"", ")", "textbox", ".", "Tags", ".", "Add", "(", "bkt", ".", "contextdialogs", ".", "BKT_CONTEXTDIALOG_TAGKEY", ",", "TOOLBOX_AGENDA_POPUP", ")" ]
[ 1236, 4 ]
[ 1239, 91 ]
null
python
de
['de', 'de', 'de']
True
true
null
ReadDatafromFile
( file )
return text
Auslesen von Daten aus einer Datei
Auslesen von Daten aus einer Datei
def ReadDatafromFile( file ): ''' Auslesen von Daten aus einer Datei ''' thisActionName = "ReadDatafromFile" try: with codecs.open( file, encoding="utf-8", mode="r") as f: text = f.read() f.close() except: return False return text
[ "def", "ReadDatafromFile", "(", "file", ")", ":", "thisActionName", "=", "\"ReadDatafromFile\"", "try", ":", "with", "codecs", ".", "open", "(", "file", ",", "encoding", "=", "\"utf-8\"", ",", "mode", "=", "\"r\"", ")", "as", "f", ":", "text", "=", "f", ".", "read", "(", ")", "f", ".", "close", "(", ")", "except", ":", "return", "False", "return", "text" ]
[ 258, 0 ]
[ 269, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
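A short usage sketch for ReadDatafromFile; the helper returns False on any read error, so the result should be compared with "is False" rather than used as a plain boolean, because an empty file legitimately yields an empty string. The file name below is a placeholder:

    text = ReadDatafromFile("settings.txt")  # placeholder path
    if text is False:
        print("Datei konnte nicht gelesen werden")
    else:
        print(len(text), "Zeichen gelesen")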
qa_mlc.findTransmissions
( self, position )
return { 'filename': self.infos["filename"], 'Kennung': self._kennung.format( **self.infos ), 'check_subtag': self.infos["check_subtag"], 'position': position, 'pxPosition': pxPosition, 'type': self.infos['testTags'], 'profile': profile, 'unit': self.infos['unit'], 'energy': self.infos['energy'], 'gantry' : self.infos['gantry'], 'collimator': self.infos['collimator'], 'leaf.peaks' : minPeaks, 'leaf.min' : np.min( profile[minPeaks] ), 'leaf.mean' : meanMinPeaks, 'leaf.max' : np.max( profile[minPeaks] ), 'interleaf.peaks' : maxPeaks, 'interleaf.min' : np.min( profile[maxPeaks] ), 'interleaf.mean' : meanMaxPeaks, 'interleaf.max' : np.max( profile[maxPeaks] ) }
Die transmissions bestimmen und mean über alle ermitteln. Diese Auswertung wird quer über alle Leafs verwendet leaf ( min ) interleaf (max) Attributes ---------- position :
Die transmissions bestimmen und mean über alle ermitteln. Diese Auswertung wird quer über alle Leafs verwendet
def findTransmissions( self, position ): """Die transmissions bestimmen und mean über alle ermitteln. Diese Auswertung wird quer über alle Leafs verwendet leaf ( min ) interleaf (max) Attributes ---------- position : """ if position < -200 or position > 200 : return {} # PixelPosition ermitteln pxPosition = self.image.mm2dots_X( position ) # int(round( self.image.dpmm * position + self.image.cax.x )) """ Analysis """ #position = 0.6 # Profilwerte in % #profile = MultiProfile(self.image.array[:, int(round(self.image.array.shape[1]*vert_position))]) #profile = MultiProfile( self.image.array[:, 563] * 100 ) # evt. asl % mit MultiProfile( self.image.array[:, pixPosition] * 100 ) if self.infos["collimator"] == 90: self.image.rot90( n=3 ) elif self.infos["collimator"] == 180: self.image.rot90( n=2 ) elif self.infos["collimator"] == 270: self.image.rot90( n=1 ) profile = MultiProfile( self.image.array[:, pxPosition] ) """ max Peaks (interleaf) suchen """ # max peaks für innere leafs bei 10 für äußere bei 20 maxPeaks = [] # FIXED: manchmal werden als peak_idx floats mit .0 von find_peaks ermittelt deshalb nach int wandeln peak_idxs = profile.find_peaks( min_distance=10, threshold=0.1 ) for peak_idx in peak_idxs: maxPeaks.append( int( peak_idx ) ) # mittelwert von maxPeaks meanMaxPeaks = np.mean( profile[maxPeaks] ) """ min Peaks (leaf) suchen """ minPeaks = [] profile.invert() # FIXED: manchmal werden als peak_idx floats mit .0 von find_peaks ermittelt deshalb nach int wandeln peak_idxs = profile.find_peaks( min_distance=10, threshold=0.1 ) for peak_idx in peak_idxs: minPeaks.append( int( peak_idx ) ) profile.invert() # mittelwert von minPeaks meanMinPeaks = np.mean( profile[minPeaks] ) # rotation wieder zurücknehmen if self.infos["collimator"] == 90: self.image.rot90( n=3 ) elif self.infos["collimator"] == 180: self.image.rot90( n=2 ) elif self.infos["collimator"] == 270: self.image.rot90( n=1 ) return { 'filename': self.infos["filename"], 'Kennung': self._kennung.format( **self.infos ), 'check_subtag': self.infos["check_subtag"], 'position': position, 'pxPosition': pxPosition, 'type': self.infos['testTags'], 'profile': profile, 'unit': self.infos['unit'], 'energy': self.infos['energy'], 'gantry' : self.infos['gantry'], 'collimator': self.infos['collimator'], 'leaf.peaks' : minPeaks, 'leaf.min' : np.min( profile[minPeaks] ), 'leaf.mean' : meanMinPeaks, 'leaf.max' : np.max( profile[minPeaks] ), 'interleaf.peaks' : maxPeaks, 'interleaf.min' : np.min( profile[maxPeaks] ), 'interleaf.mean' : meanMaxPeaks, 'interleaf.max' : np.max( profile[maxPeaks] ) }
[ "def", "findTransmissions", "(", "self", ",", "position", ")", ":", "if", "position", "<", "-", "200", "or", "position", ">", "200", ":", "return", "{", "}", "# PixelPosition ermitteln", "pxPosition", "=", "self", ".", "image", ".", "mm2dots_X", "(", "position", ")", "# int(round( self.image.dpmm * position + self.image.cax.x ))", "\"\"\" Analysis \"\"\"", "#position = 0.6", "# Profilwerte in %", "#profile = MultiProfile(self.image.array[:, int(round(self.image.array.shape[1]*vert_position))])", "#profile = MultiProfile( self.image.array[:, 563] * 100 )", "# evt. asl % mit MultiProfile( self.image.array[:, pixPosition] * 100 )", "if", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "90", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "3", ")", "elif", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "180", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "2", ")", "elif", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "270", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "1", ")", "profile", "=", "MultiProfile", "(", "self", ".", "image", ".", "array", "[", ":", ",", "pxPosition", "]", ")", "\"\"\" max Peaks (interleaf) suchen \"\"\"", "# max peaks für innere leafs bei 10 für äußere bei 20", "maxPeaks", "=", "[", "]", "# FIXED: manchmal werden als peak_idx floats mit .0 von find_peaks ermittelt deshalb nach int wandeln", "peak_idxs", "=", "profile", ".", "find_peaks", "(", "min_distance", "=", "10", ",", "threshold", "=", "0.1", ")", "for", "peak_idx", "in", "peak_idxs", ":", "maxPeaks", ".", "append", "(", "int", "(", "peak_idx", ")", ")", "# mittelwert von maxPeaks", "meanMaxPeaks", "=", "np", ".", "mean", "(", "profile", "[", "maxPeaks", "]", ")", "\"\"\" min Peaks (leaf) suchen \"\"\"", "minPeaks", "=", "[", "]", "profile", ".", "invert", "(", ")", "# FIXED: manchmal werden als peak_idx floats mit .0 von find_peaks ermittelt deshalb nach int wandeln", "peak_idxs", "=", "profile", ".", "find_peaks", "(", "min_distance", "=", "10", ",", "threshold", "=", "0.1", ")", "for", "peak_idx", "in", "peak_idxs", ":", "minPeaks", ".", "append", "(", "int", "(", "peak_idx", ")", ")", "profile", ".", "invert", "(", ")", "# mittelwert von minPeaks", "meanMinPeaks", "=", "np", ".", "mean", "(", "profile", "[", "minPeaks", "]", ")", "# rotation wieder zurücknehmen", "if", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "90", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "3", ")", "elif", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "180", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "2", ")", "elif", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "270", ":", "self", ".", "image", ".", "rot90", "(", "n", "=", "1", ")", "return", "{", "'filename'", ":", "self", ".", "infos", "[", "\"filename\"", "]", ",", "'Kennung'", ":", "self", ".", "_kennung", ".", "format", "(", "*", "*", "self", ".", "infos", ")", ",", "'check_subtag'", ":", "self", ".", "infos", "[", "\"check_subtag\"", "]", ",", "'position'", ":", "position", ",", "'pxPosition'", ":", "pxPosition", ",", "'type'", ":", "self", ".", "infos", "[", "'testTags'", "]", ",", "'profile'", ":", "profile", ",", "'unit'", ":", "self", ".", "infos", "[", "'unit'", "]", ",", "'energy'", ":", "self", ".", "infos", "[", "'energy'", "]", ",", "'gantry'", ":", "self", ".", "infos", "[", "'gantry'", "]", ",", "'collimator'", ":", "self", ".", "infos", "[", "'collimator'", "]", ",", "'leaf.peaks'", ":", "minPeaks", ",", "'leaf.min'", ":", "np", ".", "min", "(", 
"profile", "[", "minPeaks", "]", ")", ",", "'leaf.mean'", ":", "meanMinPeaks", ",", "'leaf.max'", ":", "np", ".", "max", "(", "profile", "[", "minPeaks", "]", ")", ",", "'interleaf.peaks'", ":", "maxPeaks", ",", "'interleaf.min'", ":", "np", ".", "min", "(", "profile", "[", "maxPeaks", "]", ")", ",", "'interleaf.mean'", ":", "meanMaxPeaks", ",", "'interleaf.max'", ":", "np", ".", "max", "(", "profile", "[", "maxPeaks", "]", ")", "}" ]
[ 138, 4 ]
[ 224, 13 ]
null
python
de
['de', 'de', 'de']
True
true
null
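A hedged usage sketch for findTransmissions: "check" stands for a qa_mlc instance whose image and infos have already been loaded; positions are millimetres relative to the CAX, and values outside +/-200 mm return an empty dict:

    for pos in (-60, 0, 60):
        r = check.findTransmissions(pos)   # hypothetical, preloaded qa_mlc instance
        if r:
            print(pos, r["leaf.mean"], r["interleaf.mean"])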
ispSAFRS._int_filter
(cls, query, qs:str="" )
return query
Die in qs angegebene RQL Filterbedingung auswerten und an query anhängen. Parameters ---------- query : obj Das bisherige query Object qs : str, optional RQL Querystring. The default is "". Returns ------- query die query mit zusätzlichem Filter
Die in qs angegebene RQL Filterbedingung auswerten und an query anhängen.
def _int_filter(cls, query, qs:str="" ): """Die in qs angegebene RQL Filterbedingung auswerten und an query anhängen. Parameters ---------- query : obj Das bisherige query Object qs : str, optional RQL Querystring. The default is "". Returns ------- query die query mit zusätzlichem Filter """ # RQLQuery bereitstellen die eigene Klasse muss mit _set_entities angegeben werden rql = RQLQuery( cls ) rql._set_entities( cls ) # rql_filter auswerten #rql.rql_parse( qs ) try: rql.rql_parse( qs ) except NotImplementedError as exc: cls.appError("_int_filter", "NotImplementedError: {}".format( exc ) ) query = query.filter( text("1=2") ) return query except Exception as exc: cls.appError("_int_filter", "rql-error: {}".format( exc ) ) query = query.filter( text("1=2") ) return query # die Bedingung an die query anfügen if rql._rql_where_clause is not None: query = query.filter( rql._rql_where_clause ) cls.appInfo("_int_filter", str( rql._rql_where_clause ) ) return query
[ "def", "_int_filter", "(", "cls", ",", "query", ",", "qs", ":", "str", "=", "\"\"", ")", ":", "# RQLQuery bereitstellen die eigene Klasse muss mit _set_entities angegeben werden", "rql", "=", "RQLQuery", "(", "cls", ")", "rql", ".", "_set_entities", "(", "cls", ")", "# rql_filter auswerten", "#rql.rql_parse( qs )", "try", ":", "rql", ".", "rql_parse", "(", "qs", ")", "except", "NotImplementedError", "as", "exc", ":", "cls", ".", "appError", "(", "\"_int_filter\"", ",", "\"NotImplementedError: {}\"", ".", "format", "(", "exc", ")", ")", "query", "=", "query", ".", "filter", "(", "text", "(", "\"1=2\"", ")", ")", "return", "query", "except", "Exception", "as", "exc", ":", "cls", ".", "appError", "(", "\"_int_filter\"", ",", "\"rql-error: {}\"", ".", "format", "(", "exc", ")", ")", "query", "=", "query", ".", "filter", "(", "text", "(", "\"1=2\"", ")", ")", "return", "query", "# die Bedingung an die query anfügen", "if", "rql", ".", "_rql_where_clause", "is", "not", "None", ":", "query", "=", "query", ".", "filter", "(", "rql", ".", "_rql_where_clause", ")", "cls", ".", "appInfo", "(", "\"_int_filter\"", ",", "str", "(", "rql", ".", "_rql_where_clause", ")", ")", "return", "query" ]
[ 1037, 4 ]
[ 1075, 20 ]
null
python
de
['de', 'de', 'de']
True
true
null
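A hypothetical sketch of how the RQL helper might be called; "DbTests" stands for an ispSAFRS model class and the RQL filter string is only an example. On a parse error the method appends a "1=2" condition, so the query then returns no rows:

    query = DbTests._int_filter(DbTests.query, "eq(aktiv,true)")  # assumed model and RQL string
    rows = query.all()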
ispConfig.__setattr__
(self, k, v)
Zugriff auf die Klassenattribute mit _. sonst wird in self._config gesetzt Unlike __getattr__, __setattr__ is an encapsulation solution. It allows you to define behavior for assignment to an attribute regardless of whether or not that attribute exists, meaning you can define custom rules for any changes in the values of attributes. However, you have to be careful with how you use __setattr__. Parameters ---------- k : str Name des Attributs aus dem Object oder der _config. v : Zu setzender Inhalt.
Zugriff auf die Klassenattribute mit _.
def __setattr__(self, k, v): """Zugriff auf die Klassenattribute mit _. sonst wird in self._config gesetzt Unlike __getattr__, __setattr__ is an encapsulation solution. It allows you to define behavior for assignment to an attribute regardless of whether or not that attribute exists, meaning you can define custom rules for any changes in the values of attributes. However, you have to be careful with how you use __setattr__. Parameters ---------- k : str Name des Attributs aus dem Object oder der _config. v : Zu setzender Inhalt. """ if k[0] == "_": self.__dict__[k] = v else: self._config[k] = v
[ "def", "__setattr__", "(", "self", ",", "k", ",", "v", ")", ":", "if", "k", "[", "0", "]", "==", "\"_\"", ":", "self", ".", "__dict__", "[", "k", "]", "=", "v", "else", ":", "self", ".", "_config", "[", "k", "]", "=", "v" ]
[ 401, 4 ]
[ 423, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
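A behaviour sketch for the attribute routing above, assuming "cfg" is an ispConfig instance: names starting with an underscore become ordinary instance attributes, everything else is stored in the _config mapping:

    cfg._debug = True          # stored in cfg.__dict__["_debug"]
    cfg.resultsPath = "/tmp"   # stored in cfg._config["resultsPath"]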
RawData.Join_Handler
( self, data )
return False
Ein User hat den Chat betreten
Ein User hat den Chat betreten
def Join_Handler( self, data ): ''' Ein User hat den Chat betreten ''' thisActionName = "Subscription_Handler" tmpTestMessage = ":[email protected] JOIN #nachtmeister666" JOIN = self.reJOIN.search( data.RawData ) if JOIN: tag = JOIN.group( "name" ) UserDisplayName = self.Parent.GetDisplayName( tag ) return UserDisplayName return False
[ "def", "Join_Handler", "(", "self", ",", "data", ")", ":", "thisActionName", "=", "\"Subscription_Handler\"", "tmpTestMessage", "=", "\":[email protected] JOIN #nachtmeister666\"", "JOIN", "=", "self", ".", "reJOIN", ".", "search", "(", "data", ".", "RawData", ")", "if", "JOIN", ":", "tag", "=", "JOIN", ".", "group", "(", "\"name\"", ")", "UserDisplayName", "=", "self", ".", "Parent", ".", "GetDisplayName", "(", "tag", ")", "return", "UserDisplayName", "return", "False" ]
[ 547, 4 ]
[ 560, 20 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBase.pdf_error_result
(self, md:dict={}, date="", group_len:int=0, errors:dict={}, msg:str="", pos:dict={}, )
return result
Für das PDF eine fehlermeldung erzeugen. Parameters ---------- md : dict, optional DESCRIPTION. The default is {}. date : TYPE, optional DESCRIPTION. The default is "". group_len : int, optional DESCRIPTION. The default is 0. errors: dict immer mit - msg: str - data: DataFrame msg : str, optional DESCRIPTION. The default is "". pos : dict, optional DESCRIPTION. The default is {}. Returns ------- result : int Rückgabe von createResult()
Für das PDF eine fehlermeldung erzeugen.
def pdf_error_result(self, md:dict={}, date="", group_len:int=0, errors:dict={}, msg:str="", pos:dict={}, ): """Für das PDF eine fehlermeldung erzeugen. Parameters ---------- md : dict, optional DESCRIPTION. The default is {}. date : TYPE, optional DESCRIPTION. The default is "". group_len : int, optional DESCRIPTION. The default is 0. errors: dict immer mit - msg: str - data: DataFrame msg : str, optional DESCRIPTION. The default is "". pos : dict, optional DESCRIPTION. The default is {}. Returns ------- result : int Rückgabe von createResult() """ acceptance = 0 # Gesamt check - na self.pdf.resultIcon( acceptance ) if msg=="": msg = '<b>Datenfehler</b>' if "msg" in errors: msg += "\n\n" + errors["msg"] self.pdf.markdown( msg ) if "data" in errors: self.pdf.pandas( errors["data"], pos ) result = self.createResult( md=md, date=date, group=group_len, acceptance=acceptance ) return result
[ "def", "pdf_error_result", "(", "self", ",", "md", ":", "dict", "=", "{", "}", ",", "date", "=", "\"\"", ",", "group_len", ":", "int", "=", "0", ",", "errors", ":", "dict", "=", "{", "}", ",", "msg", ":", "str", "=", "\"\"", ",", "pos", ":", "dict", "=", "{", "}", ",", ")", ":", "acceptance", "=", "0", "# Gesamt check - na", "self", ".", "pdf", ".", "resultIcon", "(", "acceptance", ")", "if", "msg", "==", "\"\"", ":", "msg", "=", "'<b>Datenfehler</b>'", "if", "\"msg\"", "in", "errors", ":", "msg", "+=", "\"\\n\\n\"", "+", "errors", "[", "\"msg\"", "]", "self", ".", "pdf", ".", "markdown", "(", "msg", ")", "if", "\"data\"", "in", "errors", ":", "self", ".", "pdf", ".", "pandas", "(", "errors", "[", "\"data\"", "]", ",", "pos", ")", "result", "=", "self", ".", "createResult", "(", "md", "=", "md", ",", "date", "=", "date", ",", "group", "=", "group_len", ",", "acceptance", "=", "acceptance", ")", "return", "result" ]
[ 276, 4 ]
[ 322, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
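A hypothetical call from inside a check method of an ispBase subclass; the errors dict follows the documented shape (an optional msg plus an optional DataFrame under data), and the metadata values are placeholders:

    result = self.pdf_error_result(
        md=md, date="2021-06-07", group_len=0,
        errors={"msg": "keine auswertbaren Felder gefunden"},
    )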
ResetDefaultTargetNames
()
return
Lösche alle Daten in der Tabelle 'game_heist_targets' und schreibe die Default Daten in die Tabelle
Lösche alle Daten in der Tabelle 'game_heist_targets' und schreibe die Default Daten in die Tabelle
def ResetDefaultTargetNames(): ''' Lösche alle Daten in der Tabelle 'game_heist_targets' und schreibe die Default Daten in die Tabelle ''' global myGameHeist global myLogger # Benachrichtigung, dass alle Daten gelöscht werden winsound.MessageBeep() returnValue = MessageBox(0, u"Du willst die ursprünglichen Namen der Ziele wiederherstellen?" "\r\nAlle bisherigen Einstellungen werden dabei gelöscht!" , u"Bist du sicher?", 4) if returnValue == MB_YES: # Funktion zum Zurücksetzen aufrufen myGameHeist.DB_create_DefaultTargets() # Nachrichtenbox MessageBox(0, u"Die Namen der Ziele wurden auf die Grundeinstellungen zurückgesetzt!" "\r\nLade das Skript neu und aktualisiere die Einstellungen." , u"Reset wurde abgeschlossen!", 0) # Information ins Log schreiben myLogger.WriteLog("") myLogger.WriteLog( " ---------------------------------------------------------------") myLogger.WriteLog( " ---- TARGET DATEN WURDEN ZURÜCKGESETZT ----") myLogger.WriteLog( " ---------------------------------------------------------------") myLogger.WriteLog("") return
[ "def", "ResetDefaultTargetNames", "(", ")", ":", "global", "myGameHeist", "global", "myLogger", "# Benachrichtigung, dass alle Daten gelöscht werden\r", "winsound", ".", "MessageBeep", "(", ")", "returnValue", "=", "MessageBox", "(", "0", ",", "u\"Du willst die ursprünglichen Namen der Ziele wiederherstellen?\"\r", "\"\\r\\nAlle bisherigen Einstellungen werden dabei gelöscht!\"\r", ",", "u\"Bist du sicher?\"", ",", "4", ")", "if", "returnValue", "==", "MB_YES", ":", "# Funktion zum Zurücksetzen aufrufen\r", "myGameHeist", ".", "DB_create_DefaultTargets", "(", ")", "# Nachrichtenbox\r", "MessageBox", "(", "0", ",", "u\"Die Namen der Ziele wurden auf die Grundeinstellungen zurückgesetzt!\"\r", "\"\\r\\nLade das Skript neu und aktualisiere die Einstellungen.\"", ",", "u\"Reset wurde abgeschlossen!\"", ",", "0", ")", "# Information ins Log schreiben\r", "myLogger", ".", "WriteLog", "(", "\"\"", ")", "myLogger", ".", "WriteLog", "(", "\" ---------------------------------------------------------------\"", ")", "myLogger", ".", "WriteLog", "(", "\" ---- TARGET DATEN WURDEN ZURÜCKGESETZT ----\")", "\r", "myLogger", ".", "WriteLog", "(", "\" ---------------------------------------------------------------\"", ")", "myLogger", ".", "WriteLog", "(", "\"\"", ")", "return" ]
[ 264, 0 ]
[ 297, 10 ]
null
python
de
['de', 'de', 'de']
True
true
null
testBase.setUpClass
(cls)
Wird beim initialisieren der Testklasse aufgerufen - Api bereitstellen - test Ergebnisse zum Vergleich laden
Wird beim initialisieren der Testklasse aufgerufen - Api bereitstellen - test Ergebnisse zum Vergleich laden
def setUpClass(cls): ''' Wird beim initialisieren der Testklasse aufgerufen - Api bereitstellen - test Ergebnisse zum Vergleich laden ''' # This attribute controls the maximum length of diffs output by assert methods that report diffs on failure. # It defaults to 80*8 characters cls.maxDiff = None files_path = os.path.join( ABSPATH, 'files') pdf_path = os.path.join( ABSPATH, 'files', 'pdf') config_path = os.path.join( ABSPATH, '..', 'config') if not os.path.exists( files_path ): os.mkdir( files_path ) # alte Datenbank löschen: über Pfad Angaben falls in der config nicht die testdatei steht db_file = os.path.join( files_path, "tests.db" ) if os.path.exists( db_file ): os.remove( db_file ) # alle erzeugten pdf und den Pfad pdf löschen if os.path.exists( pdf_path ): shutil.rmtree( pdf_path ) swagger_file = os.path.join( files_path, "swagger_test.json" ) if not os.path.exists( swagger_file ): with open(swagger_file, 'w') as fp: obj = { "info": { "title": "swagger test" } } json.dump(obj, fp, indent=2) # webapp mit unitest config cls.webapp = run( { "loglevel" :{ "safrs" : logging.DEBUG #"webapp" : logging.INFO, }, "server" : { "webserver" : { "name" : "swagger_test", "port" : 5001, "TESTING": True, "reloader" : False }, "api": { "DBADMIN": True, "custom_swagger_config": os.path.join( files_path, "swagger_test.json" ) } }, "templates":{ "PDF-HEADER": None }, "database": { "main": "tests", "tests" : { "connection": "sqlite:///{{BASE_DIR}}/tests/files/tests.db" } } } ) cls.app = cls.webapp.app #print("setUpClass", cls.webapp.config.get() ) # Grunddaten in die Datenbank laden data = { "dbtests" : [ { "string": "eins", "integer": 1, "gruppe":"A", "tags":"A,K", "aktiv":True }, { "string": "zwei", "integer": 2, "gruppe":"B", "tags":"B,M", "aktiv":False }, { "string": "drei", "integer": 3, "gruppe":"C", "tags":"M,K", "aktiv":True }, { "string": "vier", "integer": 4, "gruppe":"C", "aktiv":False }, { "string": "fünf", "integer": 5, "gruppe":"B", "tags":"A,K", "aktiv":True } ], "dbtestsrel" : [ { "dbtests_id": "1", "rstring": "r_eins", "rinteger": 11 }, { "dbtests_id": "2", "rstring": "r_zwei", "rinteger": 12 }, { "dbtests_id": "3", "rstring": "r_drei", "rinteger": 13 }, { "dbtests_id": "4", "rstring": "r_vier", "rinteger": 14 }, { "dbtests_id": "5", "rstring": "r_fünf", "rinteger": 15 } ] } for d in data["dbtests"]: response = cls.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data": { "attributes": d, "type":"dbtests" } })) for d in data["dbtestsrel"]: response = cls.app.post( "api/dbtestsrel/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data": { "attributes": d, "type":"dbtestsrel" } }))
[ "def", "setUpClass", "(", "cls", ")", ":", "# This attribute controls the maximum length of diffs output by assert methods that report diffs on failure. ", "# It defaults to 80*8 characters", "cls", ".", "maxDiff", "=", "None", "files_path", "=", "os", ".", "path", ".", "join", "(", "ABSPATH", ",", "'files'", ")", "pdf_path", "=", "os", ".", "path", ".", "join", "(", "ABSPATH", ",", "'files'", ",", "'pdf'", ")", "config_path", "=", "os", ".", "path", ".", "join", "(", "ABSPATH", ",", "'..'", ",", "'config'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "files_path", ")", ":", "os", ".", "mkdir", "(", "files_path", ")", "# alte Datenbank löschen: über Pfad Angaben falls in der config nicht die testdatei steht ", "db_file", "=", "os", ".", "path", ".", "join", "(", "files_path", ",", "\"tests.db\"", ")", "if", "os", ".", "path", ".", "exists", "(", "db_file", ")", ":", "os", ".", "remove", "(", "db_file", ")", "# alle erzeugten pdf und den Pfad pdf löschen", "if", "os", ".", "path", ".", "exists", "(", "pdf_path", ")", ":", "shutil", ".", "rmtree", "(", "pdf_path", ")", "swagger_file", "=", "os", ".", "path", ".", "join", "(", "files_path", ",", "\"swagger_test.json\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "swagger_file", ")", ":", "with", "open", "(", "swagger_file", ",", "'w'", ")", "as", "fp", ":", "obj", "=", "{", "\"info\"", ":", "{", "\"title\"", ":", "\"swagger test\"", "}", "}", "json", ".", "dump", "(", "obj", ",", "fp", ",", "indent", "=", "2", ")", "# webapp mit unitest config", "cls", ".", "webapp", "=", "run", "(", "{", "\"loglevel\"", ":", "{", "\"safrs\"", ":", "logging", ".", "DEBUG", "#\"webapp\" : logging.INFO,", "}", ",", "\"server\"", ":", "{", "\"webserver\"", ":", "{", "\"name\"", ":", "\"swagger_test\"", ",", "\"port\"", ":", "5001", ",", "\"TESTING\"", ":", "True", ",", "\"reloader\"", ":", "False", "}", ",", "\"api\"", ":", "{", "\"DBADMIN\"", ":", "True", ",", "\"custom_swagger_config\"", ":", "os", ".", "path", ".", "join", "(", "files_path", ",", "\"swagger_test.json\"", ")", "}", "}", ",", "\"templates\"", ":", "{", "\"PDF-HEADER\"", ":", "None", "}", ",", "\"database\"", ":", "{", "\"main\"", ":", "\"tests\"", ",", "\"tests\"", ":", "{", "\"connection\"", ":", "\"sqlite:///{{BASE_DIR}}/tests/files/tests.db\"", "}", "}", "}", ")", "cls", ".", "app", "=", "cls", ".", "webapp", ".", "app", "#print(\"setUpClass\", cls.webapp.config.get() )", "# Grunddaten in die Datenbank laden", "data", "=", "{", "\"dbtests\"", ":", "[", "{", "\"string\"", ":", "\"eins\"", ",", "\"integer\"", ":", "1", ",", "\"gruppe\"", ":", "\"A\"", ",", "\"tags\"", ":", "\"A,K\"", ",", "\"aktiv\"", ":", "True", "}", ",", "{", "\"string\"", ":", "\"zwei\"", ",", "\"integer\"", ":", "2", ",", "\"gruppe\"", ":", "\"B\"", ",", "\"tags\"", ":", "\"B,M\"", ",", "\"aktiv\"", ":", "False", "}", ",", "{", "\"string\"", ":", "\"drei\"", ",", "\"integer\"", ":", "3", ",", "\"gruppe\"", ":", "\"C\"", ",", "\"tags\"", ":", "\"M,K\"", ",", "\"aktiv\"", ":", "True", "}", ",", "{", "\"string\"", ":", "\"vier\"", ",", "\"integer\"", ":", "4", ",", "\"gruppe\"", ":", "\"C\"", ",", "\"aktiv\"", ":", "False", "}", ",", "{", "\"string\"", ":", "\"fünf\",", " ", "integer\":", " ", ",", " ", "gruppe\":", "\"", "B\",", " ", "tags\":", "\"", "A,K\",", " ", "aktiv\":", "T", "rue ", "", "]", ",", "\"dbtestsrel\"", ":", "[", "{", "\"dbtests_id\"", ":", "\"1\"", ",", "\"rstring\"", ":", "\"r_eins\"", ",", "\"rinteger\"", ":", "11", "}", ",", "{", "\"dbtests_id\"", ":", "\"2\"", ",", 
"\"rstring\"", ":", "\"r_zwei\"", ",", "\"rinteger\"", ":", "12", "}", ",", "{", "\"dbtests_id\"", ":", "\"3\"", ",", "\"rstring\"", ":", "\"r_drei\"", ",", "\"rinteger\"", ":", "13", "}", ",", "{", "\"dbtests_id\"", ":", "\"4\"", ",", "\"rstring\"", ":", "\"r_vier\"", ",", "\"rinteger\"", ":", "14", "}", ",", "{", "\"dbtests_id\"", ":", "\"5\"", ",", "\"rstring\"", ":", "\"r_fünf\",", " ", "rinteger\":", " ", "5 ", "", "]", "}", "for", "d", "in", "data", "[", "\"dbtests\"", "]", ":", "response", "=", "cls", ".", "app", ".", "post", "(", "\"api/dbtests/\"", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ",", "data", "=", "json", ".", "dumps", "(", "{", "\"data\"", ":", "{", "\"attributes\"", ":", "d", ",", "\"type\"", ":", "\"dbtests\"", "}", "}", ")", ")", "for", "d", "in", "data", "[", "\"dbtestsrel\"", "]", ":", "response", "=", "cls", ".", "app", ".", "post", "(", "\"api/dbtestsrel/\"", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ",", "data", "=", "json", ".", "dumps", "(", "{", "\"data\"", ":", "{", "\"attributes\"", ":", "d", ",", "\"type\"", ":", "\"dbtestsrel\"", "}", "}", ")", ")" ]
[ 820, 4 ]
[ 918, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_field.plotProfile
(self, data, metadata={} )
return self.getPlot()
Ein horizontale und vertikale Profilachse plotten Parameters ---------- data : dict metadata : dict profileSize profileTitle - format Ersetzungen aus self.infos sind möglich
Ein horizontale und vertikale Profilachse plotten
def plotProfile(self, data, metadata={} ): """Ein horizontale und vertikale Profilachse plotten Parameters ---------- data : dict metadata : dict profileSize profileTitle - format Ersetzungen aus self.infos sind möglich """ # plotbereiche festlegen und profileSize als imgSize übergeben fig, ax = self.initPlot( imgSize=metadata["profileSize"], nrows=2 ) # axes coordinates are 0,0 is bottom left and 1,1 is upper right # Kurven Informationen if not "profileTitle" in metadata: metadata["profileTitle"] = "{Kennung} - Energie:{energy} Gantry:{gantry:.1f} Kolli:{collimator:.1f}" ax[0].set_title( metadata["profileTitle"].format( **self.infos ) ) #x= np.divide(data["horizontal"]['profile'].values, self.image.dpmm + self.image.cax.x) #ax[0].get_xaxis().set_ticks( np.arange( self.mm2dots_X(-200), self.mm2dots_X(200), self.mm2dots_X(50) ) ) #ax[0].get_xaxis().set_ticklabels([-200,0,200]) #ax[0].set_xlim([ self.mm2dots_X(-210), self.mm2dots_X(210) ]) #ax[0].set_title( 'horizontal' ) # 2. Kurve horizontal # x-Achse ax[0].get_xaxis().set_ticklabels([]) ax[0].get_xaxis().set_ticks( [] ) # y-achse ax[0].get_yaxis().set_ticklabels([]) ax[0].get_yaxis().set_ticks( [] ) # kurve plotten ax[0].plot(data["horizontal"]['profile'].values , color='b') # links rechts min max ax[0].axhline(data["horizontal"]['profile max'], color='g', linewidth=1 ) ax[0].axhline(data["horizontal"]['profile min'], color='g', linewidth=1 ) ax[0].axvline(data["horizontal"]['profile left'], color='g', linewidth=1, linestyle='-.') ax[0].axvline(data["horizontal"]['profile right'], color='g', linewidth=1, linestyle='-.') cax_idx = data["horizontal"]['profile'].fwxm_center() ax[0].axvline(cax_idx, color='g', linewidth=1, linestyle='-.') # limits nach dem autom. setzen der Kurve xlim = ax[0].get_xlim() width = xlim[1] + xlim[0] ylim = ax[0].get_ylim() height = ylim[1] + ylim[0] ax[0].text( width / 2, height / 10, #self.image.mm2dots_X(0), # x-Koordinate: 0 ganz links, 1 ganz rechts #self.image.mm2dots_Y(500), # y-Koordinate: 0 ganz oben, 1 ganz unten 'crossline', # der Text der ausgegeben wird ha='center', # horizontalalignment va='center', # verticalalignment fontsize=20, # 'font' ist äquivalent alpha=.5 # Floatzahl von 0.0 transparent bis 1.0 opak ) #ax[0].text(2.5, 2.5, 'horizontal', ha='center', va='center', size=20, alpha=.5) #ax[0].set_title('Horizontal') # 2. Kurve vertikal # label und Ticks abschalten # x-Achse ax[1].get_xaxis().set_ticklabels([]) ax[1].get_xaxis().set_ticks( [] ) # y-achse ax[1].get_yaxis().set_ticklabels([]) ax[1].get_yaxis().set_ticks( [] ) # Kurve plotten ax[1].plot(data["vertical"]['profile'].values, color='r') # links rechts min max ax[1].axhline(data["vertical"]['profile max'], color='g', linewidth=1) ax[1].axhline(data["vertical"]['profile min'], color='g', linewidth=1) ax[1].axvline(data["vertical"]['profile left'], color='g', linewidth=1, linestyle='-.') ax[1].axvline(data["vertical"]['profile right'], color='g', linewidth=1, linestyle='-.') cax_idx = data["vertical"]['profile'].fwxm_center() ax[1].axvline(cax_idx, color='g', linewidth=1, linestyle='-.') #ax[1].set_title('Vertikal') # limits nach dem autom. 
setzen der Kurve xlim = ax[0].get_xlim() width = xlim[1] + xlim[0] ylim = ax[0].get_ylim() height = ylim[1] + ylim[0] ax[1].text( width / 2, height / 10, #self.image.mm2dots_X(0), #self.image.mm2dots_Y(500), 'inline', ha='center', va='center', size=20, alpha=.5 ) import matplotlib.pyplot as plt # Layout optimieren plt.tight_layout(pad=0.4, w_pad=1.0, h_pad=1.0) # data der Grafik zurückgeben return self.getPlot()
[ "def", "plotProfile", "(", "self", ",", "data", ",", "metadata", "=", "{", "}", ")", ":", "# plotbereiche festlegen und profileSize als imgSize übergeben", "fig", ",", "ax", "=", "self", ".", "initPlot", "(", "imgSize", "=", "metadata", "[", "\"profileSize\"", "]", ",", "nrows", "=", "2", ")", "# axes coordinates are 0,0 is bottom left and 1,1 is upper right", "# Kurven Informationen", "if", "not", "\"profileTitle\"", "in", "metadata", ":", "metadata", "[", "\"profileTitle\"", "]", "=", "\"{Kennung} - Energie:{energy} Gantry:{gantry:.1f} Kolli:{collimator:.1f}\"", "ax", "[", "0", "]", ".", "set_title", "(", "metadata", "[", "\"profileTitle\"", "]", ".", "format", "(", "*", "*", "self", ".", "infos", ")", ")", "#x= np.divide(data[\"horizontal\"]['profile'].values, self.image.dpmm + self.image.cax.x)", "#ax[0].get_xaxis().set_ticks( np.arange( self.mm2dots_X(-200), self.mm2dots_X(200), self.mm2dots_X(50) ) )", "#ax[0].get_xaxis().set_ticklabels([-200,0,200])", "#ax[0].set_xlim([ self.mm2dots_X(-210), self.mm2dots_X(210) ])", "#ax[0].set_title( 'horizontal' )", "# 2. Kurve horizontal", "# x-Achse", "ax", "[", "0", "]", ".", "get_xaxis", "(", ")", ".", "set_ticklabels", "(", "[", "]", ")", "ax", "[", "0", "]", ".", "get_xaxis", "(", ")", ".", "set_ticks", "(", "[", "]", ")", "# y-achse", "ax", "[", "0", "]", ".", "get_yaxis", "(", ")", ".", "set_ticklabels", "(", "[", "]", ")", "ax", "[", "0", "]", ".", "get_yaxis", "(", ")", ".", "set_ticks", "(", "[", "]", ")", "# kurve plotten", "ax", "[", "0", "]", ".", "plot", "(", "data", "[", "\"horizontal\"", "]", "[", "'profile'", "]", ".", "values", ",", "color", "=", "'b'", ")", "# links rechts min max", "ax", "[", "0", "]", ".", "axhline", "(", "data", "[", "\"horizontal\"", "]", "[", "'profile max'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ")", "ax", "[", "0", "]", ".", "axhline", "(", "data", "[", "\"horizontal\"", "]", "[", "'profile min'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ")", "ax", "[", "0", "]", ".", "axvline", "(", "data", "[", "\"horizontal\"", "]", "[", "'profile left'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "ax", "[", "0", "]", ".", "axvline", "(", "data", "[", "\"horizontal\"", "]", "[", "'profile right'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "cax_idx", "=", "data", "[", "\"horizontal\"", "]", "[", "'profile'", "]", ".", "fwxm_center", "(", ")", "ax", "[", "0", "]", ".", "axvline", "(", "cax_idx", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "# limits nach dem autom. 
setzen der Kurve", "xlim", "=", "ax", "[", "0", "]", ".", "get_xlim", "(", ")", "width", "=", "xlim", "[", "1", "]", "+", "xlim", "[", "0", "]", "ylim", "=", "ax", "[", "0", "]", ".", "get_ylim", "(", ")", "height", "=", "ylim", "[", "1", "]", "+", "ylim", "[", "0", "]", "ax", "[", "0", "]", ".", "text", "(", "width", "/", "2", ",", "height", "/", "10", ",", "#self.image.mm2dots_X(0), # x-Koordinate: 0 ganz links, 1 ganz rechts", "#self.image.mm2dots_Y(500), # y-Koordinate: 0 ganz oben, 1 ganz unten", "'crossline'", ",", "# der Text der ausgegeben wird", "ha", "=", "'center'", ",", "# horizontalalignment", "va", "=", "'center'", ",", "# verticalalignment", "fontsize", "=", "20", ",", "# 'font' ist äquivalent", "alpha", "=", ".5", "# Floatzahl von 0.0 transparent bis 1.0 opak", ")", "#ax[0].text(2.5, 2.5, 'horizontal', ha='center', va='center', size=20, alpha=.5)", "#ax[0].set_title('Horizontal')", "# 2. Kurve vertikal", "# label und Ticks abschalten", "# x-Achse", "ax", "[", "1", "]", ".", "get_xaxis", "(", ")", ".", "set_ticklabels", "(", "[", "]", ")", "ax", "[", "1", "]", ".", "get_xaxis", "(", ")", ".", "set_ticks", "(", "[", "]", ")", "# y-achse", "ax", "[", "1", "]", ".", "get_yaxis", "(", ")", ".", "set_ticklabels", "(", "[", "]", ")", "ax", "[", "1", "]", ".", "get_yaxis", "(", ")", ".", "set_ticks", "(", "[", "]", ")", "# Kurve plotten", "ax", "[", "1", "]", ".", "plot", "(", "data", "[", "\"vertical\"", "]", "[", "'profile'", "]", ".", "values", ",", "color", "=", "'r'", ")", "# links rechts min max", "ax", "[", "1", "]", ".", "axhline", "(", "data", "[", "\"vertical\"", "]", "[", "'profile max'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ")", "ax", "[", "1", "]", ".", "axhline", "(", "data", "[", "\"vertical\"", "]", "[", "'profile min'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ")", "ax", "[", "1", "]", ".", "axvline", "(", "data", "[", "\"vertical\"", "]", "[", "'profile left'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "ax", "[", "1", "]", ".", "axvline", "(", "data", "[", "\"vertical\"", "]", "[", "'profile right'", "]", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "cax_idx", "=", "data", "[", "\"vertical\"", "]", "[", "'profile'", "]", ".", "fwxm_center", "(", ")", "ax", "[", "1", "]", ".", "axvline", "(", "cax_idx", ",", "color", "=", "'g'", ",", "linewidth", "=", "1", ",", "linestyle", "=", "'-.'", ")", "#ax[1].set_title('Vertikal')", "# limits nach dem autom. setzen der Kurve", "xlim", "=", "ax", "[", "0", "]", ".", "get_xlim", "(", ")", "width", "=", "xlim", "[", "1", "]", "+", "xlim", "[", "0", "]", "ylim", "=", "ax", "[", "0", "]", ".", "get_ylim", "(", ")", "height", "=", "ylim", "[", "1", "]", "+", "ylim", "[", "0", "]", "ax", "[", "1", "]", ".", "text", "(", "width", "/", "2", ",", "height", "/", "10", ",", "#self.image.mm2dots_X(0),", "#self.image.mm2dots_Y(500),", "'inline'", ",", "ha", "=", "'center'", ",", "va", "=", "'center'", ",", "size", "=", "20", ",", "alpha", "=", ".5", ")", "import", "matplotlib", ".", "pyplot", "as", "plt", "# Layout optimieren", "plt", ".", "tight_layout", "(", "pad", "=", "0.4", ",", "w_pad", "=", "1.0", ",", "h_pad", "=", "1.0", ")", "# data der Grafik zurückgeben", "return", "self", ".", "getPlot", "(", ")" ]
[ 142, 4 ]
[ 258, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_field.plot4Qprofile
( self, data , metadata={} )
return self.getPlot()
Ein angegebenes 4Q Profil plotten Parameters ---------- data : dict
Ein angegebenes 4Q Profil plotten
def plot4Qprofile( self, data , metadata={} ): """ Ein angegebenes 4Q Profil plotten Parameters ---------- data : dict """ # plotbereiche festlegen fig, ax = self.initPlot( metadata["profileSize"] ) #print("plot4Qprofile", data) ax.set_title(data["name"]) # kurve plotten ax.plot(data["profile"].values, color='b') # y Achsenlimit ax.set_ylim(0.5, 1.5) # x-Achse ax.get_xaxis().set_ticklabels([ data["name"][0:2], data["name"][-2:] ]) ax.get_xaxis().set_ticks( [0, len(data["profile"].values) ] ) # y-achse anzeigen ax.get_yaxis().set_ticklabels( [0.75, 1, 1.25] ) ax.get_yaxis().set_ticks( [0.75, 1, 1.25] ) # grid anzeigen ax.grid( True ) plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0) # data der Grafik zurückgeben return self.getPlot()
[ "def", "plot4Qprofile", "(", "self", ",", "data", ",", "metadata", "=", "{", "}", ")", ":", "# plotbereiche festlegen", "fig", ",", "ax", "=", "self", ".", "initPlot", "(", "metadata", "[", "\"profileSize\"", "]", ")", "#print(\"plot4Qprofile\", data)", "ax", ".", "set_title", "(", "data", "[", "\"name\"", "]", ")", "# kurve plotten", "ax", ".", "plot", "(", "data", "[", "\"profile\"", "]", ".", "values", ",", "color", "=", "'b'", ")", "# y Achsenlimit", "ax", ".", "set_ylim", "(", "0.5", ",", "1.5", ")", "# x-Achse", "ax", ".", "get_xaxis", "(", ")", ".", "set_ticklabels", "(", "[", "data", "[", "\"name\"", "]", "[", "0", ":", "2", "]", ",", "data", "[", "\"name\"", "]", "[", "-", "2", ":", "]", "]", ")", "ax", ".", "get_xaxis", "(", ")", ".", "set_ticks", "(", "[", "0", ",", "len", "(", "data", "[", "\"profile\"", "]", ".", "values", ")", "]", ")", "# y-achse anzeigen", "ax", ".", "get_yaxis", "(", ")", ".", "set_ticklabels", "(", "[", "0.75", ",", "1", ",", "1.25", "]", ")", "ax", ".", "get_yaxis", "(", ")", ".", "set_ticks", "(", "[", "0.75", ",", "1", ",", "1.25", "]", ")", "# grid anzeigen", "ax", ".", "grid", "(", "True", ")", "plt", ".", "tight_layout", "(", "pad", "=", "0.4", ",", "w_pad", "=", "0.5", ",", "h_pad", "=", "1.0", ")", "# data der Grafik zurückgeben", "return", "self", ".", "getPlot", "(", ")" ]
[ 347, 4 ]
[ 380, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
Jahr.__int__
(self)
return self.jahr
Das Jahr als Zahl
Das Jahr als Zahl
def __int__(self): """Das Jahr als Zahl""" return self.jahr
[ "def", "__int__", "(", "self", ")", ":", "return", "self", ".", "jahr" ]
[ 22, 4 ]
[ 24, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
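A small usage sketch; the constructor call is an assumption (only __int__ is shown above), the point is that int() unwraps the stored year:

    jahr = Jahr(2021)        # assumed constructor signature
    print(int(jahr) + 1)     # -> 2022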
ImpfterminService.termin_valid
(self, zeitpunkt)
return zeitpunkt >= öffnung
Hier wird geprüft ob ein einzelner Termin gültig ist.
Hier wird geprüft ob ein einzelner Termin gültig ist.
def termin_valid(self, zeitpunkt): """Hier wird geprüft ob ein einzelner Termin gültig ist. """ öffnung = datetime.fromisoformat('2021-06-07T00:02:00') return zeitpunkt >= öffnung
[ "def", "termin_valid", "(", "self", ",", "zeitpunkt", ")", ":", "öf", "fnung ", " ", "atetime.", "f", "romisoformat(", "'", "2021-06-07T00:02:00')", "", "return", "zeitpunkt", ">=", "öf", "fnung" ]
[ 533, 4 ]
[ 537, 36 ]
null
python
de
['de', 'de', 'de']
True
true
null
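A self-contained worked example of the cut-off comparison used in termin_valid: only appointments at or after 2021-06-07 00:02:00 count as valid:

    from datetime import datetime

    öffnung = datetime.fromisoformat('2021-06-07T00:02:00')
    print(datetime.fromisoformat('2021-06-07T00:01:00') >= öffnung)  # False
    print(datetime.fromisoformat('2021-06-08T09:30:00') >= öffnung)  # True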
ariaDicomClass.getAllGQA
(self, pids=None, testTags:list=None, year:int=None, month:int=None, day:int=None, withInfo=True, withResult=False )
return self.prepareGQA( images, year=year, withInfo=withInfo, withResult=withResult )
Holt für die angegebenen PatientenIds aus allen Courses die Felder mit Angaben in [Radiation].[Comment] und wertet sie entsprechend aus Parameters ---------- pids : list, optional DESCRIPTION. The default is None. testTags : list, optional DESCRIPTION. The default is None. year : int, optional DESCRIPTION. The default is None. month : int, optional DESCRIPTION. The default is None. day : int, optional DESCRIPTION. The default is None. withInfo : TYPE, optional DESCRIPTION. The default is True. withResult : TYPE, optional DESCRIPTION. The default is False. Returns ------- gqa : dict Aufbau:: units: dict <unit>: dict <infoType>: dict ready: dict all: int <energy> : int gqa: dict fields: int energyFields: int counts: dict all: int <energy> : int pdf: dict, items: dict <energy>: dict <SliceUID>: {info} -> dies wird bei run in ein DataFrame umgewandelt series: [],
Holt für die angegebenen PatientenIds aus allen Courses die Felder mit Angaben in [Radiation].[Comment] und wertet sie entsprechend aus
def getAllGQA(self, pids=None, testTags:list=None, year:int=None, month:int=None, day:int=None, withInfo=True, withResult=False ): '''Holt für die angegebenen PatientenIds aus allen Courses die Felder mit Angaben in [Radiation].[Comment] und wertet sie entsprechend aus Parameters ---------- pids : list, optional DESCRIPTION. The default is None. testTags : list, optional DESCRIPTION. The default is None. year : int, optional DESCRIPTION. The default is None. month : int, optional DESCRIPTION. The default is None. day : int, optional DESCRIPTION. The default is None. withInfo : TYPE, optional DESCRIPTION. The default is True. withResult : TYPE, optional DESCRIPTION. The default is False. Returns ------- gqa : dict Aufbau:: units: dict <unit>: dict <infoType>: dict ready: dict all: int <energy> : int gqa: dict fields: int energyFields: int counts: dict all: int <energy> : int pdf: dict, items: dict <energy>: dict <SliceUID>: {info} -> dies wird bei run in ein DataFrame umgewandelt series: [], ''' if not pids: return {} if type(pids) == str: pids = pids.split(",") if not type(pids) == list: pids = [pids] if not pids or len(pids) == 0: return {} # filter zusammenstellen where = "LEN([Radiation].[Comment]) > 0 " subSql = [] for pid in pids: subSql.append( "[Patient].[PatientId]='{}'".format( pid.strip() ) ) if len( subSql ) > 0: where += " AND (" + " OR ".join( subSql ) + ")" images, sql = self.getImages( addWhere=where, AcquisitionYear=year, AcquisitionMonth=month, AcquisitionDay=day, testTags=testTags ) self.lastSQL = sql # Pfad für die PDF Dateien self.initResultsPath( year ) return self.prepareGQA( images, year=year, withInfo=withInfo, withResult=withResult )
[ "def", "getAllGQA", "(", "self", ",", "pids", "=", "None", ",", "testTags", ":", "list", "=", "None", ",", "year", ":", "int", "=", "None", ",", "month", ":", "int", "=", "None", ",", "day", ":", "int", "=", "None", ",", "withInfo", "=", "True", ",", "withResult", "=", "False", ")", ":", "if", "not", "pids", ":", "return", "{", "}", "if", "type", "(", "pids", ")", "==", "str", ":", "pids", "=", "pids", ".", "split", "(", "\",\"", ")", "if", "not", "type", "(", "pids", ")", "==", "list", ":", "pids", "=", "[", "pids", "]", "if", "not", "pids", "or", "len", "(", "pids", ")", "==", "0", ":", "return", "{", "}", "# filter zusammenstellen", "where", "=", "\"LEN([Radiation].[Comment]) > 0 \"", "subSql", "=", "[", "]", "for", "pid", "in", "pids", ":", "subSql", ".", "append", "(", "\"[Patient].[PatientId]='{}'\"", ".", "format", "(", "pid", ".", "strip", "(", ")", ")", ")", "if", "len", "(", "subSql", ")", ">", "0", ":", "where", "+=", "\" AND (\"", "+", "\" OR \"", ".", "join", "(", "subSql", ")", "+", "\")\"", "images", ",", "sql", "=", "self", ".", "getImages", "(", "addWhere", "=", "where", ",", "AcquisitionYear", "=", "year", ",", "AcquisitionMonth", "=", "month", ",", "AcquisitionDay", "=", "day", ",", "testTags", "=", "testTags", ")", "self", ".", "lastSQL", "=", "sql", "# Pfad für die PDF Dateien", "self", ".", "initResultsPath", "(", "year", ")", "return", "self", ".", "prepareGQA", "(", "images", ",", "year", "=", "year", ",", "withInfo", "=", "withInfo", ",", "withResult", "=", "withResult", ")" ]
[ 117, 4 ]
[ 199, 93 ]
null
python
de
['de', 'de', 'de']
True
true
null
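A hypothetical usage sketch for getAllGQA; "adc" stands for an initialised ariaDicomClass, and the patient id and test tag are placeholders. The returned dict follows the structure documented above (a "units" mapping at the top level):

    gqa = adc.getAllGQA(pids="qa2021", testTags=["MT_WL"], year=2021, month=6)
    for unit, info in gqa.get("units", {}).items():
        print(unit, list(info.keys()))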
PdfGenerator.setContentName
( self, name:str="_", autoNumber=True )
return self.contentName
Bestimmt den Namen eines zusammenhängenden Seitenbereichs. Um auf einen vorhandenen umzuschalten muss autoNumber auf False gesetzt werden Parameters ---------- name : str - default = _ Name des zusammengehörigen Contentbereichs autoNumber: Nummer des Bereichs Returns ------- name : str Der aktuelle contentName
Bestimmt den Namen eines zusammenhängenden Seitenbereichs.
def setContentName( self, name:str="_", autoNumber=True ): """Bestimmt den Namen eines zusammenhängenden Seitenbereichs. Um auf einen vorhandenen umzuschalten muss autoNumber auf False gesetzt werden Parameters ---------- name : str - default = _ Name des zusammengehörigen Contentbereichs autoNumber: Nummer des Bereichs Returns ------- name : str Der aktuelle contentName """ if autoNumber: n = len(self.pageContent) + 1 #self.contentNumber += 1 name = "({}) - {}".format(n, name) self.contentName = name return self.contentName
[ "def", "setContentName", "(", "self", ",", "name", ":", "str", "=", "\"_\"", ",", "autoNumber", "=", "True", ")", ":", "if", "autoNumber", ":", "n", "=", "len", "(", "self", ".", "pageContent", ")", "+", "1", "#self.contentNumber += 1", "name", "=", "\"({}) - {}\"", ".", "format", "(", "n", ",", "name", ")", "self", ".", "contentName", "=", "name", "return", "self", ".", "contentName" ]
[ 757, 4 ]
[ 781, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
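A usage sketch on a hypothetical PdfGenerator instance "pdf": each call with autoNumber=True starts a new numbered content block such as "(1) - Jahresauswertung"; passing autoNumber=False switches back to an existing name:

    name = pdf.setContentName("Jahresauswertung")   # e.g. "(1) - Jahresauswertung"
    pdf.setContentName(name, autoNumber=False)      # re-select the same block later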
extract_table_old
(obj)
return results
Rekursion zum Abflachen der strukturierten JSON
Rekursion zum Abflachen der strukturierten JSON
def extract_table_old(obj): """Rekursion zum Abflachen der strukturierten JSON""" arr = [] current_Parent = '' def extract_dict(obj, arr, current_Parent): """Recursively search for values of key in JSON tree.""" if isinstance(obj, dict): temp_dict = {} for k, v in obj.items(): if isinstance(v, (dict, list)): new_parent = obj.get('_id', '') extract_dict(v, arr, new_parent) else: temp_dict.update(({'child_of': current_Parent})) temp_dict.update({k: v}) arr.append(temp_dict) elif isinstance(obj, list): for item in obj: extract_dict(item, arr, current_Parent) return arr list_temp = extract_dict(obj, arr, current_Parent) results = [] for item in list_temp: if item not in results: results.append(item) return results
[ "def", "extract_table_old", "(", "obj", ")", ":", "arr", "=", "[", "]", "current_Parent", "=", "''", "def", "extract_dict", "(", "obj", ",", "arr", ",", "current_Parent", ")", ":", "\"\"\"Recursively search for values of key in JSON tree.\"\"\"", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "temp_dict", "=", "{", "}", "for", "k", ",", "v", "in", "obj", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "(", "dict", ",", "list", ")", ")", ":", "new_parent", "=", "obj", ".", "get", "(", "'_id'", ",", "''", ")", "extract_dict", "(", "v", ",", "arr", ",", "new_parent", ")", "else", ":", "temp_dict", ".", "update", "(", "(", "{", "'child_of'", ":", "current_Parent", "}", ")", ")", "temp_dict", ".", "update", "(", "{", "k", ":", "v", "}", ")", "arr", ".", "append", "(", "temp_dict", ")", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "for", "item", "in", "obj", ":", "extract_dict", "(", "item", ",", "arr", ",", "current_Parent", ")", "return", "arr", "list_temp", "=", "extract_dict", "(", "obj", ",", "arr", ",", "current_Parent", ")", "results", "=", "[", "]", "for", "item", "in", "list_temp", ":", "if", "item", "not", "in", "results", ":", "results", ".", "append", "(", "item", ")", "return", "results" ]
[ 295, 0 ]
[ 322, 18 ]
null
python
de
['de', 'de', 'de']
True
true
null
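A self-contained example of the flattening helper; the nested document is made up for illustration. Child rows reference their parent via the _id value stored under child_of:

    doc = {"_id": "root", "name": "A", "children": [{"_id": "c1", "name": "B"}]}
    for row in extract_table_old(doc):
        print(row)
    # {'child_of': 'root', '_id': 'c1', 'name': 'B'}
    # {'child_of': '', '_id': 'root', 'name': 'A'}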
ispConfig.mqttCleanup
( self )
Schließt mqtt und entfernt den logger.
Schließt mqtt und entfernt den logger.
def mqttCleanup( self ): """Schließt mqtt und entfernt den logger. """ if self._mqtthdlr: # mqtt beenden self._mqtthdlr.shutdown() #print( "config.cleanup _mqtthdlr" ) logger = logging.getLogger( "MQTT" ) # verbindung zu _mqtthdlr im logger entfernen del( logger._mqtthdlr ) for h in logger.handlers: logger.removeHandler(h) self._mqtthdlr = None
[ "def", "mqttCleanup", "(", "self", ")", ":", "if", "self", ".", "_mqtthdlr", ":", "# mqtt beenden", "self", ".", "_mqtthdlr", ".", "shutdown", "(", ")", "#print( \"config.cleanup _mqtthdlr\" )", "logger", "=", "logging", ".", "getLogger", "(", "\"MQTT\"", ")", "# verbindung zu _mqtthdlr im logger entfernen", "del", "(", "logger", ".", "_mqtthdlr", ")", "for", "h", "in", "logger", ".", "handlers", ":", "logger", ".", "removeHandler", "(", "h", ")", "self", ".", "_mqtthdlr", "=", "None" ]
[ 755, 4 ]
[ 770, 33 ]
null
python
de
['de', 'de', 'de']
True
true
null
plotImage.dots2mm_Y
( self, dots )
return ( dots - self.cax.y ) / self.dpmm
Wandelt eine Y dot Angabe im mm Position des Image um. Parameters ---------- dots : int Position in mm. Returns ------- float Umgewandelte Position
Wandelt eine Y dot Angabe im mm Position des Image um. Parameters ---------- dots : int Position in mm.
def dots2mm_Y( self, dots ): """Wandelt eine Y dot Angabe im mm Position des Image um. Parameters ---------- dots : int Position in mm. Returns ------- float Umgewandelte Position """ return ( dots - self.cax.y ) / self.dpmm
[ "def", "dots2mm_Y", "(", "self", ",", "dots", ")", ":", "return", "(", "dots", "-", "self", ".", "cax", ".", "y", ")", "/", "self", ".", "dpmm" ]
[ 93, 4 ]
[ 107, 48 ]
null
python
de
['de', 'de', 'de']
True
true
null
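A worked example of the conversion in dots2mm_Y (the dpmm and CAX values are made up): with 3 dots per millimetre and the CAX at pixel row 512, pixel row 572 maps to (572 - 512) / 3 = 20 mm:

    dpmm, cax_y = 3.0, 512
    print((572 - cax_y) / dpmm)   # -> 20.0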
FSImage.__init__
(self, pathOrData=None, **kwargs )
Erweitert PFDicomImage um die eigene DicomImage Klasse
Erweitert PFDicomImage um die eigene DicomImage Klasse
def __init__(self, pathOrData=None, **kwargs ): """ Erweitert PFDicomImage um die eigene DicomImage Klasse """ #print("PFImage.__init__", path, kwargs) # das pylinacpicketfence Image #FlatSym.__init__( self, path, **kwargs ) # die eigene Erweiterung DicomImage.__init__( self, pathOrData )
[ "def", "__init__", "(", "self", ",", "pathOrData", "=", "None", ",", "*", "*", "kwargs", ")", ":", "#print(\"PFImage.__init__\", path, kwargs)", "# das pylinacpicketfence Image", "#FlatSym.__init__( self, path, **kwargs )", "# die eigene Erweiterung", "DicomImage", ".", "__init__", "(", "self", ",", "pathOrData", ")" ]
[ 51, 4 ]
[ 61, 47 ]
null
python
de
['de', 'de', 'de']
True
true
null
GUI.setzen0_ausgaenge
(self)
Alle Ausgänge auf 0 setzen
Alle Ausgänge auf 0 setzen
def setzen0_ausgaenge(self): """ Alle Ausgänge auf 0 setzen """ setzer = Setzer() setzer.setzen0_ausgaenge()
[ "def", "setzen0_ausgaenge", "(", "self", ")", ":", "setzer", "=", "Setzer", "(", ")", "setzer", ".", "setzen0_ausgaenge", "(", ")" ]
[ 195, 4 ]
[ 198, 34 ]
null
python
de
['de', 'de', 'de']
True
true
null
objectfunc
(func, objektliste, feldliste, ignore_empty=False)
return sumdir
Für jedes Property in `feldliste` der Objekte in `objektliste` func über alle Properties ausführen. Wenn `ignore_empty` übergeben wird, werden leere Werte / Null-Werte nicht an `func` übergeben.
Für jedes Property in `feldliste` der Objekte in `objektliste` func über alle Properties ausführen.
def objectfunc(func, objektliste, feldliste, ignore_empty=False): """Für jedes Property in `feldliste` der Objekte in `objektliste` func über alle Properties ausführen. Wenn `ignore_empty` übergeben wird, werden leere Werte / Null-Werte nicht an `func` übergeben. """ sumdir = {} if hasattr(feldliste, 'split'): feldliste = feldliste.split() for feldname in feldliste: if objektliste: if ignore_empty: sumdir[feldname] = func([getattr(x, feldname, 0) for x in objektliste if getattr(x, feldname, 0)]) else: sumdir[feldname] = func([getattr(x, feldname, 0) for x in objektliste]) else: sumdir[feldname] = 0.0 return sumdir
[ "def", "objectfunc", "(", "func", ",", "objektliste", ",", "feldliste", ",", "ignore_empty", "=", "False", ")", ":", "sumdir", "=", "{", "}", "if", "hasattr", "(", "feldliste", ",", "'split'", ")", ":", "feldliste", "=", "feldliste", ".", "split", "(", ")", "for", "feldname", "in", "feldliste", ":", "if", "objektliste", ":", "if", "ignore_empty", ":", "sumdir", "[", "feldname", "]", "=", "func", "(", "[", "getattr", "(", "x", ",", "feldname", ",", "0", ")", "for", "x", "in", "objektliste", "if", "getattr", "(", "x", ",", "feldname", ",", "0", ")", "]", ")", "else", ":", "sumdir", "[", "feldname", "]", "=", "func", "(", "[", "getattr", "(", "x", ",", "feldname", ",", "0", ")", "for", "x", "in", "objektliste", "]", ")", "else", ":", "sumdir", "[", "feldname", "]", "=", "0.0", "return", "sumdir" ]
[ 156, 0 ]
[ 176, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
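A self-contained example that aggregates two numeric fields over a list of objects with objectfunc; SimpleNamespace stands in for arbitrary objects with attributes:

    from types import SimpleNamespace

    posten = [SimpleNamespace(menge=2, preis=9.5), SimpleNamespace(menge=0, preis=3.0)]
    print(objectfunc(sum, posten, "menge preis"))                 # {'menge': 2, 'preis': 12.5}
    print(objectfunc(max, posten, ["preis"], ignore_empty=True))  # {'preis': 9.5}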
GUI.setzen0_eingaenge
(self)
Alle Eingänge auf 0 setzen
Alle Eingänge auf 0 setzen
def setzen0_eingaenge(self): """ Alle Eingänge auf 0 setzen """ setzer = Setzer() setzer.setzen0_eingaenge() self.aktualisieren_eingangswerte()
[ "def", "setzen0_eingaenge", "(", "self", ")", ":", "setzer", "=", "Setzer", "(", ")", "setzer", ".", "setzen0_eingaenge", "(", ")", "self", ".", "aktualisieren_eingangswerte", "(", ")" ]
[ 200, 4 ]
[ 204, 42 ]
null
python
de
['de', 'de', 'de']
True
true
null
HeistSystem.WriteMessage_IsOnCooldown
(self)
return
Vorbereiten der Cooldown-Nachricht zur Ausgabe in den Chat
Vorbereiten der Cooldown-Nachricht zur Ausgabe in den Chat
def WriteMessage_IsOnCooldown(self): ''' Vorbereiten der Cooldown-Nachricht zur Ausgabe in den Chat ''' thisActionName = "WriteMessage_IsOnCooldown" # Benachrichtigung aus der Datenbank auslesen messageText = self.RandomMessage_ByType( messageType=self.MessageType_IsOnCooldown ) # Verbleibende Cooldown-Zeit ermitteln cooldownTime = myTime.TimePrettyFormatString( self.CD.GetCooldownDuration( scriptname=self.ScriptName, command=self.ClassName ) ) # Nachricht in den Chat schreiben self.chat_WriteTextMessage( messageText=str(messageText).format( time=cooldownTime, command=self.Settings.Game_Command ) ) return
[ "def", "WriteMessage_IsOnCooldown", "(", "self", ")", ":", "thisActionName", "=", "\"WriteMessage_IsOnCooldown\"", "# Benachrichtigung aus der Datenbank auslesen\r", "messageText", "=", "self", ".", "RandomMessage_ByType", "(", "messageType", "=", "self", ".", "MessageType_IsOnCooldown", ")", "# Verbleibende Cooldown-Zeit ermitteln\r", "cooldownTime", "=", "myTime", ".", "TimePrettyFormatString", "(", "self", ".", "CD", ".", "GetCooldownDuration", "(", "scriptname", "=", "self", ".", "ScriptName", ",", "command", "=", "self", ".", "ClassName", ")", ")", "# Nachricht in den Chat schreiben\r", "self", ".", "chat_WriteTextMessage", "(", "messageText", "=", "str", "(", "messageText", ")", ".", "format", "(", "time", "=", "cooldownTime", ",", "command", "=", "self", ".", "Settings", ".", "Game_Command", ")", ")", "return" ]
[ 1090, 4 ]
[ 1114, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
register_precondition
(func)
return register_type_func(Precondition_TYPES, PreconditionError, func)
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary Precondition_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary Precondition_TYPES hinzu.
def register_precondition(func): """Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary Precondition_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block """ return register_type_func(Precondition_TYPES, PreconditionError, func)
[ "def", "register_precondition", "(", "func", ")", ":", "return", "register_type_func", "(", "Precondition_TYPES", ",", "PreconditionError", ",", "func", ")" ]
[ 15, 0 ]
[ 22, 74 ]
null
python
de
['de', 'de', 'de']
True
true
null
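Neither register_type_func nor the Precondition_TYPES dict is shown in this record, so the following is only a hedged sketch of how such a registration decorator is typically applied; the check function and the registry key are assumptions:

# hypothetical precondition check, registered via the decorator
@register_precondition
def has_entries(value):
    # presumably raises PreconditionError through the added try/except block when the check fails
    assert len(value) > 0

# presumably Precondition_TYPES now maps this type function (e.g. by name) to its wrapped version
wrapped = Precondition_TYPES.get("has_entries")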
delete_infoprovider_resource
(path: str)
Löscht den übergebenen Infoprovider aus dem resources-Ordner. Dabei wird :func:`get_infoprovider_path` verwendet, um die richtige Ressource zu finden. Sollte der Infoprovider nicht vorhanden sein, so wird der Löschversuch ignoriert. Wird hingegen versucht einen Ordner zu löschen, so wirft dies einen Fehler. :param path: Infoprovider, welcher gelöscht werden soll, relativ zu `resources/infoprovider` :raises: OSError
Löscht den übergebenen Infoprovider aus dem resources-Ordner.
def delete_infoprovider_resource(path: str): """Löscht den übergebenen Infoprovider aus dem resources-Ordner. Dabei wird :func:`get_infoprovider_path` verwendet, um die richtige Ressource zu finden. Sollte der Infoprovider nicht vorhanden sein, so wird der Löschversuch ignoriert. Wird hingegen versucht einen Ordner zu löschen, so wirft dies einen Fehler. :param path: Infoprovider, welcher gelöscht werden soll, relativ zu `resources/infoprovider` :raises: OSError """ with contextlib.suppress(FileNotFoundError): os.remove(get_infoprovider_path(path))
[ "def", "delete_infoprovider_resource", "(", "path", ":", "str", ")", ":", "with", "contextlib", ".", "suppress", "(", "FileNotFoundError", ")", ":", "os", ".", "remove", "(", "get_infoprovider_path", "(", "path", ")", ")" ]
[ 304, 0 ]
[ 317, 46 ]
null
python
de
['de', 'de', 'de']
True
true
null
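The deletion pattern used above can be shown standalone; the concrete path is made up and get_infoprovider_path is assumed to only resolve a filename below resources/infoprovider:

import contextlib
import os

# a missing file is silently ignored ...
with contextlib.suppress(FileNotFoundError):
    os.remove("resources/infoprovider/beispiel.json")

# ... but passing a directory still raises an OSError (e.g. IsADirectoryError on Linux),
# which matches the ":raises: OSError" note in the docstring above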
status_update
()
Anzeige von Zeit, Tracknr. und Lautstärke (statusbar)
Anzeige von Zeit, Tracknr. und Lautstärke (statusbar)
def status_update(): """ Anzeige von Zeit, Tracknr. und Lautstärke (statusbar)""" info = MPC.currentsong() status = MPC.status() try: songtime = int(info['time']) if songtime == 0: raise KeyError s_min, s_sec = divmod(songtime, 60) s_hour, s_min = divmod(s_min, 60) elapsed = int(status['elapsed'].split('.')[0]) e_min, e_sec = divmod(elapsed, 60) e_hour, e_min = divmod(e_min, 60) if not s_hour: hms_elapsed = "%02d:%02d" % (e_min, e_sec) hms_songtime = "%02d:%02d" % (s_min, s_sec) else: hms_elapsed = "%d:%02d:%02d" % (e_hour, e_min, e_sec) hms_songtime = "%d:%02d:%02d" % (s_hour, s_min, s_sec) time_text = hms_elapsed + '/' + hms_songtime except KeyError: time_text = datetime.datetime.now().strftime('%d.%m.%Y %H:%M') draw_text(STATUS_WIN, time_text, fonts['std'], colors['status'], align='centerx') # Lautstärkeanzeige if 'volume' in status: volume = 'Vol.: ' + status['volume'] + '%' else: volume = 'Vol.: 0' draw_text(STATUS_WIN, volume, fonts['std'], colors['status']) # Anzeige der Stück-/Stationsnummer pln = str(int(info.get('pos', '-1')) + 1) + '/' + status.get('playlistlength', '0') draw_text(STATUS_WIN, pln, fonts['std'], colors['status'], align='topright') # Anzeige der Bitrate bitrate = status.get('bitrate', '0') + 'kbps' draw_text(BITRATE_WIN, bitrate, fonts['std'], colors['status'], align='topright')
[ "def", "status_update", "(", ")", ":", "info", "=", "MPC", ".", "currentsong", "(", ")", "status", "=", "MPC", ".", "status", "(", ")", "try", ":", "songtime", "=", "int", "(", "info", "[", "'time'", "]", ")", "if", "songtime", "==", "0", ":", "raise", "KeyError", "s_min", ",", "s_sec", "=", "divmod", "(", "songtime", ",", "60", ")", "s_hour", ",", "s_min", "=", "divmod", "(", "s_min", ",", "60", ")", "elapsed", "=", "int", "(", "status", "[", "'elapsed'", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "e_min", ",", "e_sec", "=", "divmod", "(", "elapsed", ",", "60", ")", "e_hour", ",", "e_min", "=", "divmod", "(", "e_min", ",", "60", ")", "if", "not", "s_hour", ":", "hms_elapsed", "=", "\"%02d:%02d\"", "%", "(", "e_min", ",", "e_sec", ")", "hms_songtime", "=", "\"%02d:%02d\"", "%", "(", "s_min", ",", "s_sec", ")", "else", ":", "hms_elapsed", "=", "\"%d:%02d:%02d\"", "%", "(", "e_hour", ",", "e_min", ",", "e_sec", ")", "hms_songtime", "=", "\"%d:%02d:%02d\"", "%", "(", "s_hour", ",", "s_min", ",", "s_sec", ")", "time_text", "=", "hms_elapsed", "+", "'/'", "+", "hms_songtime", "except", "KeyError", ":", "time_text", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%d.%m.%Y %H:%M'", ")", "draw_text", "(", "STATUS_WIN", ",", "time_text", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "align", "=", "'centerx'", ")", "# Lautstärkeanzeige", "if", "'volume'", "in", "status", ":", "volume", "=", "'Vol.: '", "+", "status", "[", "'volume'", "]", "+", "'%'", "else", ":", "volume", "=", "'Vol.: 0'", "draw_text", "(", "STATUS_WIN", ",", "volume", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ")", "# Anzeige der Stück-/Stationsnummer", "pln", "=", "str", "(", "int", "(", "info", ".", "get", "(", "'pos'", ",", "'-1'", ")", ")", "+", "1", ")", "+", "'/'", "+", "status", ".", "get", "(", "'playlistlength'", ",", "'0'", ")", "draw_text", "(", "STATUS_WIN", ",", "pln", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "align", "=", "'topright'", ")", "# Anzeige der Bitrate", "bitrate", "=", "status", ".", "get", "(", "'bitrate'", ",", "'0'", ")", "+", "'kbps'", "draw_text", "(", "BITRATE_WIN", ",", "bitrate", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "align", "=", "'topright'", ")" ]
[ 887, 0 ]
[ 921, 85 ]
null
python
de
['de', 'de', 'de']
True
true
null
expose_object
( self, safrs_object, url_prefix="", **properties)
Eine eigene expose_object Funktion um swagger doc zu erzeugen. Wird bei Klassen ohne Datenbankanbindung verwendet .. code:: paths: { <__qualname__> : { <__http_method> : <__rest_doc> } } In <__rest_doc>.tags wird wenn nicht angegeben __qualname__ abgelegt In <__rest_doc>.type wird wenn nicht angegeben "string" abgelegt creates a class of the form @api_decorator class Class_API(SAFRSRestAPI): SAFRSObject = safrs_object add the class as an api resource to /SAFRSObject and /SAFRSObject/{id} tablename/collectionname: safrs_object._s_collection_name, e.g. "Users" classname: safrs_object.__name__, e.g. "User" Möglichkeiten: a) /class/ : api_list in class aufrufen b) /class/{objectId} : keine Funktion objectId vorhanden, also api_get aufrufen c) /class/test : Vorhandene Funktion test in class aufrufen Parameters ---------- safrs_object : SAFRSBase subclass that we would like to expose. url_prefix : str, optional url prefix. The default is "". **properties : additional flask-restful properties. Returns ------- None.
Eine eigene expose_object Funktion um swagger doc zu erzeugen.
def expose_object( self, safrs_object, url_prefix="", **properties): """Eine eigene expose_object Funktion um swagger doc zu erzeugen. Wird bei Klassen ohne Datanbankanbindung verwendet .. code:: paths: { <__qualname__> : { <__http_method> : <__rest_doc> } } In <__rest_doc>.tags wird wenn nicht angegeben __qualname__ abgelegt In <__rest_doc>.type wird wenn nicht angegeben "string" abgelegt creates a class of the form @api_decorator class Class_API(SAFRSRestAPI): SAFRSObject = safrs_object add the class as an api resource to /SAFRSObject and /SAFRSObject/{id} tablename/collectionname: safrs_object._s_collection_name, e.g. "Users" classname: safrs_object.__name__, e.g. "User" Möglichkeiten: a) /class/ : api_list in class aufrufen b) /class/{objectId} : keine Funktion objectId vorhanden also api_get aufrufen c) /class/test : Vorhandene Funktion test in class aufrufen Parameters ---------- safrs_object : SAFSBase FSBase subclass that we would like to expose. url_prefix : str, optional url prefix. The default is "". **properties : additional flask-restful properties. Returns ------- None. """ # alle methoden der klasse durchgehen und nach __rest_doc suchen docs = { } # alle methoden von safrs_object durchsuchen und bei eigenen methoden mit __rest_doc merken for method_name in dir(safrs_object): # die method selbst bestimmen try: method = getattr(safrs_object, method_name, None) except Exception as exc: # method_name query gibt gibt einen fehler # SQL expression, column, or mapped entity expected - got '<class 'xxxxx'>' #print( "expose_object - error beim bestimmen von", method_name, exc) pass if method and hasattr(method, '__qualname__') and hasattr(method, '__rest_doc'): # full_name bestimmt die eigentliche Funktion full_name = "{}.{}".format(safrs_object.__qualname__, method_name) if method_name == "api_list": # variante a) path_name = "/{}/".format( safrs_object.__qualname__ ) elif method_name == "api_get": # variante b) path_name = "/{}/{}/".format( safrs_object.__qualname__, "{" + safrs_object._s_object_id + "}" ) else: # variante c) path_name = "/{}".format( full_name ) if method and method.__qualname__ == full_name : # für swagger . durch / ersetzen path_name = path_name.replace(".", "/") docs[ path_name ] = {} for hm in getattr(method, "__http_method", [] ): method_doc = getattr( method, '__rest_doc', {} ) if not "tags" in method_doc: method_doc["tags"] = [ safrs_object.__qualname__ ] if not "type" in method_doc: method_doc["type"] = "string" # in docs ablegen docs[ path_name ][ hm.lower() ] = method_doc # wenn in docs was ist dann die Klasse selbst in _swagger_object einfügen if len(docs) > 0: object_doc = parse_object_doc(safrs_object) object_doc["name"] = safrs_object.__qualname__ self._swagger_object["tags"].append(object_doc) custom_swagger = { "paths": docs } # doc im object selbst in _swagger_paths merken safrs_object._swagger_paths = docs _swagger_doc = self.get_swagger_doc() safrs.dict_merge(_swagger_doc, custom_swagger)
[ "def", "expose_object", "(", "self", ",", "safrs_object", ",", "url_prefix", "=", "\"\"", ",", "*", "*", "properties", ")", ":", "# alle methoden der klasse durchgehen und nach __rest_doc suchen", "docs", "=", "{", "}", "# alle methoden von safrs_object durchsuchen und bei eigenen methoden mit __rest_doc merken", "for", "method_name", "in", "dir", "(", "safrs_object", ")", ":", "# die method selbst bestimmen", "try", ":", "method", "=", "getattr", "(", "safrs_object", ",", "method_name", ",", "None", ")", "except", "Exception", "as", "exc", ":", "# method_name query gibt gibt einen fehler", "# SQL expression, column, or mapped entity expected - got '<class 'xxxxx'>'", "#print( \"expose_object - error beim bestimmen von\", method_name, exc)", "pass", "if", "method", "and", "hasattr", "(", "method", ",", "'__qualname__'", ")", "and", "hasattr", "(", "method", ",", "'__rest_doc'", ")", ":", "# full_name bestimmt die eigentliche Funktion", "full_name", "=", "\"{}.{}\"", ".", "format", "(", "safrs_object", ".", "__qualname__", ",", "method_name", ")", "if", "method_name", "==", "\"api_list\"", ":", "# variante a)", "path_name", "=", "\"/{}/\"", ".", "format", "(", "safrs_object", ".", "__qualname__", ")", "elif", "method_name", "==", "\"api_get\"", ":", "# variante b)", "path_name", "=", "\"/{}/{}/\"", ".", "format", "(", "safrs_object", ".", "__qualname__", ",", "\"{\"", "+", "safrs_object", ".", "_s_object_id", "+", "\"}\"", ")", "else", ":", "# variante c)", "path_name", "=", "\"/{}\"", ".", "format", "(", "full_name", ")", "if", "method", "and", "method", ".", "__qualname__", "==", "full_name", ":", "# für swagger . durch / ersetzen", "path_name", "=", "path_name", ".", "replace", "(", "\".\"", ",", "\"/\"", ")", "docs", "[", "path_name", "]", "=", "{", "}", "for", "hm", "in", "getattr", "(", "method", ",", "\"__http_method\"", ",", "[", "]", ")", ":", "method_doc", "=", "getattr", "(", "method", ",", "'__rest_doc'", ",", "{", "}", ")", "if", "not", "\"tags\"", "in", "method_doc", ":", "method_doc", "[", "\"tags\"", "]", "=", "[", "safrs_object", ".", "__qualname__", "]", "if", "not", "\"type\"", "in", "method_doc", ":", "method_doc", "[", "\"type\"", "]", "=", "\"string\"", "# in docs ablegen", "docs", "[", "path_name", "]", "[", "hm", ".", "lower", "(", ")", "]", "=", "method_doc", "# wenn in docs was ist dann die Klasse selbst in _swagger_object einfügen", "if", "len", "(", "docs", ")", ">", "0", ":", "object_doc", "=", "parse_object_doc", "(", "safrs_object", ")", "object_doc", "[", "\"name\"", "]", "=", "safrs_object", ".", "__qualname__", "self", ".", "_swagger_object", "[", "\"tags\"", "]", ".", "append", "(", "object_doc", ")", "custom_swagger", "=", "{", "\"paths\"", ":", "docs", "}", "# doc im object selbst in _swagger_paths merken", "safrs_object", ".", "_swagger_paths", "=", "docs", "_swagger_doc", "=", "self", ".", "get_swagger_doc", "(", ")", "safrs", ".", "dict_merge", "(", "_swagger_doc", ",", "custom_swagger", ")" ]
[ 54, 0 ]
[ 153, 54 ]
null
python
de
['de', 'de', 'de']
True
true
null
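Based on the three Möglichkeiten a) to c) in the docstring, the collected swagger paths for a hypothetical exposed class could look roughly like this; class name, object id, http method and the custom method are invented for illustration:

# hypothetical class "Geraet" with _s_object_id = "geraetId" and a decorated method "test"
docs = {
    "/Geraet/":            {"get": {"tags": ["Geraet"], "type": "string"}},  # a) api_list
    "/Geraet/{geraetId}/": {"get": {"tags": ["Geraet"], "type": "string"}},  # b) api_get
    "/Geraet/test":        {"get": {"tags": ["Geraet"], "type": "string"}},  # c) custom method Geraet.test
}
# "tags" and "type" are the defaults the loop above fills in when __rest_doc does not provide them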
run
( config:dict={} )
return webApp
Startet ispBaseWebApp mit zusätzlichen config Angaben Parameters ---------- config : dict, optional DESCRIPTION. The default is {}. Returns ------- webApp : ispBaseWebApp Die gestartete WebApplication
Startet ispBaseWebApp mit zusätzlichen config Angaben
def run( config:dict={} ): ''' Startet ispBaseWebApp mit zusätzlichen config Angaben Parameters ---------- config : dict, optional DESCRIPTION. The default is {}. Returns ------- webApp : ispBaseWebApp Die gestartete WebApplication ''' # Konfiguration öffnen _config = ispConfig( config=config ) # _config.update( config ) #print( _config ) _apiConfig = { "models": [ system, dummy, dbtests, dbtestsrel ], } _webconfig = { # nur um update von webconfig zu testen "name" : "test_isp", } # Webserver starten webApp = ispBaseWebApp( _config, db, webconfig=_webconfig, apiconfig=_apiConfig ) return webApp
[ "def", "run", "(", "config", ":", "dict", "=", "{", "}", ")", ":", "# Konfiguration öffnen", "_config", "=", "ispConfig", "(", "config", "=", "config", ")", "# _config.update( config )", "#print( _config )", "_apiConfig", "=", "{", "\"models\"", ":", "[", "system", ",", "dummy", ",", "dbtests", ",", "dbtestsrel", "]", ",", "}", "_webconfig", "=", "{", "# nur um update von webconfig zu testen", "\"name\"", ":", "\"test_isp\"", ",", "}", "# Webserver starten", "webApp", "=", "ispBaseWebApp", "(", "_config", ",", "db", ",", "webconfig", "=", "_webconfig", ",", "apiconfig", "=", "_apiConfig", ")", "return", "webApp" ]
[ 778, 0 ]
[ 811, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_wl.findColliCenter
( self )
return self.virtualCenter
Das Drehzentrum der IsoCalPlate bestimmen. Nur bei Feldern mit Gantry und Table 0°. Die Endwinkel 175 und 185 werden dabei zu einem Punkt zusammengefasst.
Das Drehzentrum der IsoCalPlate bestimmen. Nur bei Feldern mit Gantry und Table 0°. Die Endwinkel 175 und 185 werden dabei zu einem Punkt zusammengefasst.
def findColliCenter( self ): """Das Drehzentrum der IsoCalPlate bestimmen Nur bei Feldern mit Gantry und Table 0° Die Endwinkel 175 und 185 werden dabei zu einem Punkt zusammengefasst """ # ohne colField aufgerufen wird Point(0,0,0) zurückgegeben self.virtualCenter = Point( 0, 0) self.virtualCenterDots = Point( 0, 0) if len( self.colFields ) == 0: return self.virtualCenter endPoints = [] points = [] pointsDot = [] endPointsDot = [] sumArray = [] pointsDotPos = {} pointsPos = {} # nur die colFields verwenden for key, f in self.colFields.items(): # isoCal Kugelpositionen bestimmen wlField = qa_field( f ) imageArray = wlField.image.cropField( self.roi ) isoCal, isoCalDot = self._findIsoCalCenter( wlField, True ) # summenfeld erstellen um dies evt. zu verwenden # FIXME: fehler bei unterschiedlichen imgae sizes : warum gibt es die überhaupt? if len( sumArray ) == 0: sumArray = imageArray else: try: sumArray = np.add( sumArray, imageArray ) except: logger.error("qa_wl.findColliCenter np.add shape size ungleich") pass if f["collimator"] == 175 or f["collimator"] == 185: endPointsDot.append( (isoCalDot.x, isoCalDot.y) ) endPoints.append( (isoCal.x, isoCal.y) ) else: pointsDot.append( (isoCalDot.x, isoCalDot.y) ) pointsDotPos[ f["collimator"] ] = isoCalDot points.append( (isoCal.x, isoCal.y) ) pointsPos[ f["collimator"] ] = isoCal # wurde ein summenfeld erstellt if len( sumArray ) > 0: isoCalDots = self._findArrayIsoCalCenter( sumArray, True ) isoCal = wlField.image.dots2mm(isoCalDots) # kollimator 175 und 185 merken um sie zu einem Punkt zusammenzufassen # virtuellen punkte der endPoints 175 und 185 berechnen und als 180° verwenden # zuerst die dots earr = np.asarray(endPointsDot) eMean = earr.mean(axis=0) pointsDot.append( ( eMean[0], eMean[1] ) ) # die dots als 180° ablegen pointsDotPos[ 180.0 ] = Point( eMean[0], eMean[1] ) # dann die koordinaten earr = np.asarray(endPoints) eMean = earr.mean(axis=0) points.append( ( eMean[0], eMean[1] ) ) pointsPos[ 180.0 ] = Point( eMean[0], eMean[1] ) # np array verwenden arr = np.asarray(points) # zentrum der punkte arrDots = np.asarray(pointsDot) vCenterDots = arrDots.mean(axis=0) self.virtualCenterDots = Point( vCenterDots[0], vCenterDots[1] ) # dann die koordinaten vCenter = arr.mean(axis=0) self.virtualCenter = Point( vCenter[0], vCenter[1] ) # # debug # if self.debug: if isoCalDots: print("findColliCenter summfield", isoCalDots, isoCal) print("pointsDotPos", pointsDotPos) cmap=plt.cm.gray ax = plt.subplot(111) ax.imshow(sumArray, cmap=cmap) ax.axis('off') #ax.set_title(title) ax.plot( isoCalDots.x, isoCalDots.y, 'r+', ms=80, markeredgewidth=1 ) ax.plot( self.virtualCenterDots.x, self.virtualCenterDots.y, 'b+', ms=80, markeredgewidth=1 ) for p in pointsDot: ax.plot( p[0], p[1], 'g+', ms=20, markeredgewidth=1 ) ax.plot( len(sumArray)/2, len(sumArray)/2, 'y+', ms=100, markeredgewidth=1 ) # kontroll linien lineA = lines.Line2D( [ pointsDotPos[ 0 ].x, pointsDotPos[ 180 ].x ], [ pointsDotPos[ 0 ].y, pointsDotPos[ 180 ].y ], lw=1, color='green', axes=ax) ax.add_line(lineA) lineB = lines.Line2D( [ pointsDotPos[ 90 ].x, pointsDotPos[ 270 ].x ], [ pointsDotPos[ 90 ].y, pointsDotPos[ 270 ].y ], lw=1, color='green', axes=ax) ax.add_line(lineB) plt.show(ax) print("isoCal=rot, virtualCenter=blau, nulllinie=gelb") print("WL_findColliCenter", self.virtualCenter, pointsPos ) return self.virtualCenter
[ "def", "findColliCenter", "(", "self", ")", ":", "# ohne colField aufgerufen wird Point(0,0,0) zurückgegeben", "self", ".", "virtualCenter", "=", "Point", "(", "0", ",", "0", ")", "self", ".", "virtualCenterDots", "=", "Point", "(", "0", ",", "0", ")", "if", "len", "(", "self", ".", "colFields", ")", "==", "0", ":", "return", "self", ".", "virtualCenter", "endPoints", "=", "[", "]", "points", "=", "[", "]", "pointsDot", "=", "[", "]", "endPointsDot", "=", "[", "]", "sumArray", "=", "[", "]", "pointsDotPos", "=", "{", "}", "pointsPos", "=", "{", "}", "# nur die colFields verwenden ", "for", "key", ",", "f", "in", "self", ".", "colFields", ".", "items", "(", ")", ":", "# isoCal Kugelpositionen bestimmen", "wlField", "=", "qa_field", "(", "f", ")", "imageArray", "=", "wlField", ".", "image", ".", "cropField", "(", "self", ".", "roi", ")", "isoCal", ",", "isoCalDot", "=", "self", ".", "_findIsoCalCenter", "(", "wlField", ",", "True", ")", "# summenfeld erstellen um dies evt. zu verwenden", "# FIXME: fehler bei unterschiedlichen imgae sizes : warum gibt es die überhaupt?", "if", "len", "(", "sumArray", ")", "==", "0", ":", "sumArray", "=", "imageArray", "else", ":", "try", ":", "sumArray", "=", "np", ".", "add", "(", "sumArray", ",", "imageArray", ")", "except", ":", "logger", ".", "error", "(", "\"qa_wl.findColliCenter np.add shape size ungleich\"", ")", "pass", "if", "f", "[", "\"collimator\"", "]", "==", "175", "or", "f", "[", "\"collimator\"", "]", "==", "185", ":", "endPointsDot", ".", "append", "(", "(", "isoCalDot", ".", "x", ",", "isoCalDot", ".", "y", ")", ")", "endPoints", ".", "append", "(", "(", "isoCal", ".", "x", ",", "isoCal", ".", "y", ")", ")", "else", ":", "pointsDot", ".", "append", "(", "(", "isoCalDot", ".", "x", ",", "isoCalDot", ".", "y", ")", ")", "pointsDotPos", "[", "f", "[", "\"collimator\"", "]", "]", "=", "isoCalDot", "points", ".", "append", "(", "(", "isoCal", ".", "x", ",", "isoCal", ".", "y", ")", ")", "pointsPos", "[", "f", "[", "\"collimator\"", "]", "]", "=", "isoCal", "# wurde ein summenfeld erstellt ", "if", "len", "(", "sumArray", ")", ">", "0", ":", "isoCalDots", "=", "self", ".", "_findArrayIsoCalCenter", "(", "sumArray", ",", "True", ")", "isoCal", "=", "wlField", ".", "image", ".", "dots2mm", "(", "isoCalDots", ")", "# kollimator 175 und 185 merken um sie zu einem Punkt zusammenzufassen", "# virtuellen punkte der endPoints 175 und 185 berechnen und als 180° verwenden", "# zuerst die dots", "earr", "=", "np", ".", "asarray", "(", "endPointsDot", ")", "eMean", "=", "earr", ".", "mean", "(", "axis", "=", "0", ")", "pointsDot", ".", "append", "(", "(", "eMean", "[", "0", "]", ",", "eMean", "[", "1", "]", ")", ")", "# die dots als 180° ablegen", "pointsDotPos", "[", "180.0", "]", "=", "Point", "(", "eMean", "[", "0", "]", ",", "eMean", "[", "1", "]", ")", "# dann die koordinaten", "earr", "=", "np", ".", "asarray", "(", "endPoints", ")", "eMean", "=", "earr", ".", "mean", "(", "axis", "=", "0", ")", "points", ".", "append", "(", "(", "eMean", "[", "0", "]", ",", "eMean", "[", "1", "]", ")", ")", "pointsPos", "[", "180.0", "]", "=", "Point", "(", "eMean", "[", "0", "]", ",", "eMean", "[", "1", "]", ")", "# np array verwenden", "arr", "=", "np", ".", "asarray", "(", "points", ")", "# zentrum der punkte", "arrDots", "=", "np", ".", "asarray", "(", "pointsDot", ")", "vCenterDots", "=", "arrDots", ".", "mean", "(", "axis", "=", "0", ")", "self", ".", "virtualCenterDots", "=", "Point", "(", "vCenterDots", "[", "0", "]", ",", "vCenterDots", "[", "1", "]", ")", 
"# dann die koordinaten", "vCenter", "=", "arr", ".", "mean", "(", "axis", "=", "0", ")", "self", ".", "virtualCenter", "=", "Point", "(", "vCenter", "[", "0", "]", ",", "vCenter", "[", "1", "]", ")", "#", "# debug", "#", "if", "self", ".", "debug", ":", "if", "isoCalDots", ":", "print", "(", "\"findColliCenter summfield\"", ",", "isoCalDots", ",", "isoCal", ")", "print", "(", "\"pointsDotPos\"", ",", "pointsDotPos", ")", "cmap", "=", "plt", ".", "cm", ".", "gray", "ax", "=", "plt", ".", "subplot", "(", "111", ")", "ax", ".", "imshow", "(", "sumArray", ",", "cmap", "=", "cmap", ")", "ax", ".", "axis", "(", "'off'", ")", "#ax.set_title(title)", "ax", ".", "plot", "(", "isoCalDots", ".", "x", ",", "isoCalDots", ".", "y", ",", "'r+'", ",", "ms", "=", "80", ",", "markeredgewidth", "=", "1", ")", "ax", ".", "plot", "(", "self", ".", "virtualCenterDots", ".", "x", ",", "self", ".", "virtualCenterDots", ".", "y", ",", "'b+'", ",", "ms", "=", "80", ",", "markeredgewidth", "=", "1", ")", "for", "p", "in", "pointsDot", ":", "ax", ".", "plot", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ",", "'g+'", ",", "ms", "=", "20", ",", "markeredgewidth", "=", "1", ")", "ax", ".", "plot", "(", "len", "(", "sumArray", ")", "/", "2", ",", "len", "(", "sumArray", ")", "/", "2", ",", "'y+'", ",", "ms", "=", "100", ",", "markeredgewidth", "=", "1", ")", "# kontroll linien", "lineA", "=", "lines", ".", "Line2D", "(", "[", "pointsDotPos", "[", "0", "]", ".", "x", ",", "pointsDotPos", "[", "180", "]", ".", "x", "]", ",", "[", "pointsDotPos", "[", "0", "]", ".", "y", ",", "pointsDotPos", "[", "180", "]", ".", "y", "]", ",", "lw", "=", "1", ",", "color", "=", "'green'", ",", "axes", "=", "ax", ")", "ax", ".", "add_line", "(", "lineA", ")", "lineB", "=", "lines", ".", "Line2D", "(", "[", "pointsDotPos", "[", "90", "]", ".", "x", ",", "pointsDotPos", "[", "270", "]", ".", "x", "]", ",", "[", "pointsDotPos", "[", "90", "]", ".", "y", ",", "pointsDotPos", "[", "270", "]", ".", "y", "]", ",", "lw", "=", "1", ",", "color", "=", "'green'", ",", "axes", "=", "ax", ")", "ax", ".", "add_line", "(", "lineB", ")", "plt", ".", "show", "(", "ax", ")", "print", "(", "\"isoCal=rot, virtualCenter=blau, nulllinie=gelb\"", ")", "print", "(", "\"WL_findColliCenter\"", ",", "self", ".", "virtualCenter", ",", "pointsPos", ")", "return", "self", ".", "virtualCenter" ]
[ 140, 4 ]
[ 262, 33 ]
null
python
de
['de', 'de', 'de']
True
true
null
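The center computation in findColliCenter boils down to merging the 175°/185° points into a virtual 180° point and then averaging all points; a reduced numpy sketch with made-up coordinates:

import numpy as np

# hypothetical isoCal positions (x, y) per collimator angle
end_points = [(10.2, 9.8), (9.8, 10.2)]   # collimator 175 and 185
points     = [(10.0, 9.9), (10.1, 10.0)]  # e.g. collimator 0 and 90

# 175/185 are averaged into one virtual 180 degree point ...
points.append(tuple(np.asarray(end_points).mean(axis=0)))

# ... and virtualCenter is simply the mean of all remaining points
virtual_center = np.asarray(points).mean(axis=0)
print(virtual_center)  # ~ [10.033  9.967]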
GUI.setzen_abstaende
(self, hauptrahmen, unterrahmen1, unterrahmen2)
Feinschliff Layout - Abstände zwischen Fensterelementen setzen
Feinschliff Layout - Abstände zwischen Fensterelementen setzen
def setzen_abstaende(self, hauptrahmen, unterrahmen1, unterrahmen2): """ Feinschliff Layout - Abstände zwischen Fensterelementen setzen""" for element in hauptrahmen.winfo_children(): element.grid_configure(padx="10", pady="10") for element in unterrahmen1.winfo_children(): element.grid_configure(padx="2", pady="2") for element in unterrahmen2.winfo_children(): element.grid_configure(padx="2", pady="2")
[ "def", "setzen_abstaende", "(", "self", ",", "hauptrahmen", ",", "unterrahmen1", ",", "unterrahmen2", ")", ":", "for", "element", "in", "hauptrahmen", ".", "winfo_children", "(", ")", ":", "element", ".", "grid_configure", "(", "padx", "=", "\"10\"", ",", "pady", "=", "\"10\"", ")", "for", "element", "in", "unterrahmen1", ".", "winfo_children", "(", ")", ":", "element", ".", "grid_configure", "(", "padx", "=", "\"2\"", ",", "pady", "=", "\"2\"", ")", "for", "element", "in", "unterrahmen2", ".", "winfo_children", "(", ")", ":", "element", ".", "grid_configure", "(", "padx", "=", "\"2\"", ",", "pady", "=", "\"2\"", ")" ]
[ 143, 4 ]
[ 150, 54 ]
null
python
de
['de', 'de', 'de']
True
true
null
Geometry.copy_circle
(self, center, radius, start_angle, end_angle, start_line, end_line, inner_circle, outer_circle, e, rtol=1e-04, atol=1e-04, points_inner=None, points_outer=None)
return new_elements
Die Funktion kopiert die Teile eines Kreises, welche sich in der durch die Parameter definierten Teilkreisfläche befinden.
Die Funktion kopiert die Teile eines Kreises, welche sich in der durch die Parameter definierten Teilkreisfläche befinden.
def copy_circle(self, center, radius, start_angle, end_angle, start_line, end_line, inner_circle, outer_circle, e, rtol=1e-04, atol=1e-04, points_inner=None, points_outer=None): """ Die Funktion kopiert die Teile eines Kreises, welche sich in der durch die Parameter definierten Teilkreisfläche befinden. """ assert(isinstance(e, Circle)) if is_same_angle(start_angle, end_angle): pts_inner = inner_circle.intersect_circle(e, rtol, atol, False) pts_outer = outer_circle.intersect_circle(e, rtol, atol, False) points = pts_inner + pts_outer else: pts_start = e.intersect_line(start_line, rtol, atol) pts_end = e.intersect_line(end_line, rtol, atol) pts_inner = inner_circle.intersect_circle(e, rtol, atol, False) pts_outer = outer_circle.intersect_circle(e, rtol, atol, False) points = pts_start + pts_end + pts_inner + pts_outer if points_inner is not None and pts_inner: points_inner += pts_inner if points_outer is not None and pts_outer: points_outer += pts_outer new_elements = [] if len(points) < 2: if is_point_inside_region(e.p1, center, inner_circle.radius, outer_circle.radius, start_angle, end_angle): new_elements.append( Circle(Element(center=e.center, radius=e.radius))) return new_elements sorted_points = [] for p in points: alpha_p = alpha_line(e.center, p) sorted_points.append((alpha_p, p)) sorted_points.sort() x, px = sorted_points[0] del sorted_points[0] p1 = px alpha_start = alpha_line(e.center, p1) for x, p2 in sorted_points: alpha_end = alpha_line(e.center, p2) pm = middle_point_of_arc(e.center, e.radius, p1, p2, rtol=rtol) if is_point_inside_region(pm, center, inner_circle.radius, outer_circle.radius, start_angle, end_angle): alpha_middle = middle_angle(alpha_start, alpha_end) arc1 = Arc(Element(center=e.center, radius=e.radius, start_angle=alpha_start*180/np.pi, end_angle=alpha_middle*180/np.pi)) arc2 = Arc(Element(center=e.center, radius=e.radius, start_angle=alpha_middle*180/np.pi, end_angle=alpha_end*180/np.pi)) new_elements.append(arc1) new_elements.append(arc2) alpha_start = alpha_end p1 = p2 alpha_end = alpha_line(e.center, px) pm = middle_point_of_arc(e.center, e.radius, p1, px, rtol=rtol) if is_point_inside_region(pm, center, inner_circle.radius, outer_circle.radius, start_angle, end_angle): alpha_middle = middle_angle(alpha_start, alpha_end) arc1 = Arc(Element(center=e.center, radius=e.radius, start_angle=alpha_start*180/np.pi, end_angle=alpha_middle*180/np.pi)) arc2 = Arc(Element(center=e.center, radius=e.radius, start_angle=alpha_middle*180/np.pi, end_angle=alpha_end*180/np.pi)) new_elements.append(arc1) new_elements.append(arc2) return new_elements
[ "def", "copy_circle", "(", "self", ",", "center", ",", "radius", ",", "start_angle", ",", "end_angle", ",", "start_line", ",", "end_line", ",", "inner_circle", ",", "outer_circle", ",", "e", ",", "rtol", "=", "1e-04", ",", "atol", "=", "1e-04", ",", "points_inner", "=", "None", ",", "points_outer", "=", "None", ")", ":", "assert", "(", "isinstance", "(", "e", ",", "Circle", ")", ")", "if", "is_same_angle", "(", "start_angle", ",", "end_angle", ")", ":", "pts_inner", "=", "inner_circle", ".", "intersect_circle", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "pts_outer", "=", "outer_circle", ".", "intersect_circle", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "points", "=", "pts_inner", "+", "pts_outer", "else", ":", "pts_start", "=", "e", ".", "intersect_line", "(", "start_line", ",", "rtol", ",", "atol", ")", "pts_end", "=", "e", ".", "intersect_line", "(", "end_line", ",", "rtol", ",", "atol", ")", "pts_inner", "=", "inner_circle", ".", "intersect_circle", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "pts_outer", "=", "outer_circle", ".", "intersect_circle", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "points", "=", "pts_start", "+", "pts_end", "+", "pts_inner", "+", "pts_outer", "if", "points_inner", "is", "not", "None", "and", "pts_inner", ":", "points_inner", "+=", "pts_inner", "if", "points_outer", "is", "not", "None", "and", "pts_outer", ":", "points_outer", "+=", "pts_outer", "new_elements", "=", "[", "]", "if", "len", "(", "points", ")", "<", "2", ":", "if", "is_point_inside_region", "(", "e", ".", "p1", ",", "center", ",", "inner_circle", ".", "radius", ",", "outer_circle", ".", "radius", ",", "start_angle", ",", "end_angle", ")", ":", "new_elements", ".", "append", "(", "Circle", "(", "Element", "(", "center", "=", "e", ".", "center", ",", "radius", "=", "e", ".", "radius", ")", ")", ")", "return", "new_elements", "sorted_points", "=", "[", "]", "for", "p", "in", "points", ":", "alpha_p", "=", "alpha_line", "(", "e", ".", "center", ",", "p", ")", "sorted_points", ".", "append", "(", "(", "alpha_p", ",", "p", ")", ")", "sorted_points", ".", "sort", "(", ")", "x", ",", "px", "=", "sorted_points", "[", "0", "]", "del", "sorted_points", "[", "0", "]", "p1", "=", "px", "alpha_start", "=", "alpha_line", "(", "e", ".", "center", ",", "p1", ")", "for", "x", ",", "p2", "in", "sorted_points", ":", "alpha_end", "=", "alpha_line", "(", "e", ".", "center", ",", "p2", ")", "pm", "=", "middle_point_of_arc", "(", "e", ".", "center", ",", "e", ".", "radius", ",", "p1", ",", "p2", ",", "rtol", "=", "rtol", ")", "if", "is_point_inside_region", "(", "pm", ",", "center", ",", "inner_circle", ".", "radius", ",", "outer_circle", ".", "radius", ",", "start_angle", ",", "end_angle", ")", ":", "alpha_middle", "=", "middle_angle", "(", "alpha_start", ",", "alpha_end", ")", "arc1", "=", "Arc", "(", "Element", "(", "center", "=", "e", ".", "center", ",", "radius", "=", "e", ".", "radius", ",", "start_angle", "=", "alpha_start", "*", "180", "/", "np", ".", "pi", ",", "end_angle", "=", "alpha_middle", "*", "180", "/", "np", ".", "pi", ")", ")", "arc2", "=", "Arc", "(", "Element", "(", "center", "=", "e", ".", "center", ",", "radius", "=", "e", ".", "radius", ",", "start_angle", "=", "alpha_middle", "*", "180", "/", "np", ".", "pi", ",", "end_angle", "=", "alpha_end", "*", "180", "/", "np", ".", "pi", ")", ")", "new_elements", ".", "append", "(", "arc1", ")", "new_elements", ".", "append", "(", "arc2", ")", "alpha_start", "=", "alpha_end", "p1", "=", "p2", "alpha_end", 
"=", "alpha_line", "(", "e", ".", "center", ",", "px", ")", "pm", "=", "middle_point_of_arc", "(", "e", ".", "center", ",", "e", ".", "radius", ",", "p1", ",", "px", ",", "rtol", "=", "rtol", ")", "if", "is_point_inside_region", "(", "pm", ",", "center", ",", "inner_circle", ".", "radius", ",", "outer_circle", ".", "radius", ",", "start_angle", ",", "end_angle", ")", ":", "alpha_middle", "=", "middle_angle", "(", "alpha_start", ",", "alpha_end", ")", "arc1", "=", "Arc", "(", "Element", "(", "center", "=", "e", ".", "center", ",", "radius", "=", "e", ".", "radius", ",", "start_angle", "=", "alpha_start", "*", "180", "/", "np", ".", "pi", ",", "end_angle", "=", "alpha_middle", "*", "180", "/", "np", ".", "pi", ")", ")", "arc2", "=", "Arc", "(", "Element", "(", "center", "=", "e", ".", "center", ",", "radius", "=", "e", ".", "radius", ",", "start_angle", "=", "alpha_middle", "*", "180", "/", "np", ".", "pi", ",", "end_angle", "=", "alpha_end", "*", "180", "/", "np", ".", "pi", ")", ")", "new_elements", ".", "append", "(", "arc1", ")", "new_elements", ".", "append", "(", "arc2", ")", "return", "new_elements" ]
[ 1716, 4 ]
[ 1817, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
Zeitrechnung.delta
(self)
return self.__delta
Der Jahresunterschied zur Allgemeinen Zeitrechnung :return int
Der Jahresunterschied zur Allgemeinen Zeitrechnung
def delta(self) -> int: """Der Jahresunterschied zur Allgemeinen Zeitrechnung :return int""" return self.__delta
[ "def", "delta", "(", "self", ")", "->", "int", ":", "return", "self", ".", "__delta" ]
[ 47, 4 ]
[ 51, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
plotClass.getPlot
(self)
return data
Plot als Bytecode zurückgeben. Returns ------- data : BytesIO Bytecode des Plots
Plot als Bytecode zurückgeben.
def getPlot(self): """Plot als Bytecode zurückgeben. Returns ------- data : BytesIO Bytecode des Plots """ data = io.BytesIO() plt.savefig( data ) return data
[ "def", "getPlot", "(", "self", ")", ":", "data", "=", "io", ".", "BytesIO", "(", ")", "plt", ".", "savefig", "(", "data", ")", "return", "data" ]
[ 102, 4 ]
[ 114, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
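A short usage sketch for getPlot; note that after plt.savefig the returned BytesIO is positioned at its end, so callers usually rewind it before reading (the surrounding plotClass setup and figure content are assumed):

plot = plotClass()
# ... draw something with matplotlib here ...
data = plot.getPlot()

data.seek(0)  # rewind before reading the image bytes (png is matplotlib's default savefig format)
with open("plot.png", "wb") as f:
    f.write(data.read())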
loop
(values: dict, data: StepData)
Durchläuft das angegebene Array und führt für jedes Element die angegebenen `"transform"`-Funktionen aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Durchläuft das angegebene Array und führt für jedes Element die angegebenen `"transform"`-Funktionen aus.
def loop(values: dict, data: StepData): """Durchläuft das angegebene Array und führt für jedes Element die angegebenen `"transform"`-Funktionen aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ loop_values = data.deep_format(values.get("values", None), values=values) # if values is none use range if loop_values is None: start = data.get_data(values.get("range_start", 0), values, int) stop = data.get_data(values["range_stop"], values, int) loop_values = range(start, stop) for _ in data.loop_array(loop_values, values): transform(values, data)
[ "def", "loop", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "loop_values", "=", "data", ".", "deep_format", "(", "values", ".", "get", "(", "\"values\"", ",", "None", ")", ",", "values", "=", "values", ")", "# if values is none use range", "if", "loop_values", "is", "None", ":", "start", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"range_start\"", ",", "0", ")", ",", "values", ",", "int", ")", "stop", "=", "data", ".", "get_data", "(", "values", "[", "\"range_stop\"", "]", ",", "values", ",", "int", ")", "loop_values", "=", "range", "(", "start", ",", "stop", ")", "for", "_", "in", "data", ".", "loop_array", "(", "loop_values", ",", "values", ")", ":", "transform", "(", "values", ",", "data", ")" ]
[ 395, 0 ]
[ 410, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
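Two illustrative values configurations for loop, derived from the two branches above; the "transform" entry and its inner steps are assumptions about how the surrounding transform() consumes the dict:

# explicit array: the inner transform steps run once per element of "values"
werte_loop = {
    "values": ["berlin", "hamburg", "muenchen"],
    "transform": []   # assumed key holding the per-element transform steps
}

# no "values" key: range(range_start, range_stop) is used instead
range_loop = {
    "range_start": 0,  # optional, defaults to 0
    "range_stop": 5,
    "transform": []
}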
Screen.update
(self)
Darstellung auf dem Touchscreen
Darstellung auf dem Touchscreen
def update(self): """ Darstellung auf dem Touchscreen """ self.tick += 1 if self.tick >= FPS: self.tick = 0 self.sec += 1 if select([MPC], [], [], 0)[0]: self.event = MPC.idle() print(self.event) self.dirty_rects = [] MPC.idle() if not self.screensaver: if self.menu == 1: # Main Screen if self.refresh: skin1_base() if skincfg['text_on_top']: BTN_WIN[0].blit(btn["Status"], ((0, 0))) # msg_frame self.dirty_rects.append(BTN_RECT[0]) self.dirty_rects.append(BTN_RECT[3]) self.dirty_rects.append(BTN_RECT[5]) self.dirty_rects.append(BTN_RECT[6]) self.dirty_rects.append(BTN_RECT[7]) self.dirty_rects.append(BTN_RECT[8]) if 'playlist' in self.event or self.refresh: self.station, self.title, self.artist = get_info() self.station_label = ScrollText(STATION_WIN, self.station, fonts['std'], colors['status'], bg_buf['station_bg'], btn["Status"]) self.title_label = ScrollText(TITLE_WIN, self.title, fonts['title'], (colors['font']), bg_buf['title_bg'], btn["Status"]) if not self.artist: self.artist = "Now Playing:" self.artist_label = ScrollText(ARTIST_WIN, self.artist, fonts['std'], (colors['font']), bg_buf['artist_bg'], btn["Status"]) self.dirty = True if not self.refresh: self.event.remove('playlist') if 'player' in self.event or self.refresh: status = MPC.status() BTN_WIN[1].blit(bg_buf['btn1'], ((0, 0))) BTN_WIN[2].blit(bg_buf['btn2'], ((0, 0))) if 'state' in status: if status['state'] == 'stop': BTN_WIN[1].blit(btn["Play"], ((0, 0))) # Empty BTN_WIN[2].blit(btn["Empty"], ((0, 0))) # Play elif status['state'] == 'pause': BTN_WIN[1].blit(btn["Stop"], ((0, 0))) # stop BTN_WIN[2].blit(btn["Play"], ((0, 0))) # play else: BTN_WIN[1].blit(btn["Stop"], ((0, 0))) # stop BTN_WIN[2].blit(btn["Pause"], ((0, 0))) # pause self.event += 'mixer' self.dirty_rects.append(BTN_RECT[1]) self.dirty_rects.append(BTN_RECT[2]) self.dirty_rects.append(BTN_RECT[4]) self.status_update = True if not self.refresh: self.event.remove('player') if 'mixer' in self.event or self.refresh: BTN_WIN[4].blit(bg_buf['btn4'], ((0, 0))) if self.muted == True: BTN_WIN[4].blit(btn["Unmute"], ((0, 0))) # unmute else: BTN_WIN[4].blit(btn["Mute"], ((0, 0))) # mute self.dirty_rects.append(BTN_RECT[4]) self.status_update = True if not self.refresh: self.event.remove('mixer') if self.refresh: self.refresh = False self.status_update = True if not skincfg['text_on_top']: BTN_WIN[0].blit(btn["Status"], ((0, 0))) # msg_frame self.dirty_rects.append(BTN_RECT[0]) if self.sec: self.sec = 0 self.status_update = True if self.status_update: self.status_update = False STATUS_WIN.blit(bg_buf['status_bg'], (0, 0)) BITRATE_WIN.blit(bg_buf['bitrate_bg'], (0, 0)) status_update() if not skincfg['text_on_top']: LCD.blit(btn["Status"], STATUS_RECT, area=STATUS_RECT) LCD.blit(btn["Status"], BITRATE_RECT, area=BITRATE_RECT) self.dirty_rects.append(BITRATE_RECT) self.dirty_rects.append(STATUS_RECT) pygame.display.update(self.dirty_rects) self.dirty_rects = [] self.station_label.update(self.dirty) self.title_label.update(self.dirty) self.artist_label.update(self.dirty) self.dirty = False elif self.menu == 2: # Playlist Selection if 'playlist' in self.event or 'update' in self.event or self.refresh: LCD.fill(colors['bg']) if bg_buf['bg']: LCD.blit(bg_buf['bg'], (0, 0)) if skincfg['text_on_top']: skin2_base() PLS.show() if config['x_button'] and PLS.in_playlist: pygame.draw.rect(PLUS_WIN, (colors['font']), PLUS_WIN.get_rect(), PLUS_WIN.get_width() // 7) pygame.draw.line(PLUS_WIN, (colors['font']), (PLUS_WIN.get_width() // 4, PLUS_WIN.get_height() // 4), (PLUS_WIN.get_width() 
* 3 // 4, PLUS_WIN.get_height() * 3 // 4), PLUS_WIN.get_width() // 8) pygame.draw.line(PLUS_WIN, (colors['font']), (PLUS_WIN.get_width() // 4, PLUS_WIN.get_height() * 3 // 4), (PLUS_WIN.get_width() * 3 // 4, PLUS_WIN.get_height() // 4), PLUS_WIN.get_width() // 8) if config['plus_button']: if PLS.lists[PLS.index][0] == 'd': pygame.draw.rect(PLUS_WIN, (colors['font']), PLUS_WIN.get_rect(), PLUS_WIN.get_width() // 7) pygame.draw.line(PLUS_WIN, (colors['font']), (PLUS_WIN.get_width() // 2, PLUS_WIN.get_height() // 4), (PLUS_WIN.get_width() // 2, PLUS_WIN.get_height() * 3 // 4), PLUS_WIN.get_width() // 8) pygame.draw.line(PLUS_WIN, (colors['font']), (PLUS_WIN.get_width() // 4, PLUS_WIN.get_height() // 2), (PLUS_WIN.get_width() * 3 // 4, PLUS_WIN.get_height() // 2), PLUS_WIN.get_width() // 8) if not skincfg['text_on_top']: skin2_base() self.select_label = \ ScrollText(LIST_WIN[0], re.sub(r"\.pls|\.m3u", "", PLS.lists[PLS.index][1].split('/')[-1]), fonts['big'], PLS.lists[PLS.index][2], bg_buf['list_bg'], bg_buf['sel']) pygame.display.flip() if self.refresh: self.refresh = False if 'playlist' in self.event: self.event.remove('playlist') if 'update' in self.event: self.event.remove('update') self.select_label.update() elif self.menu == 3: # MPD Playback Settings if 'options' in self.event or self.refresh: LCD.fill(colors['bg']) if bg_buf['bg']: LCD.blit(bg_buf['bg'], (0, 0)) if skincfg['text_on_top']: skin3_base() if self.xf_page: # Crossfade Seite get_xfade_state() else: # Basic Settings Seite get_playback_state() if not skincfg['text_on_top']: skin3_base() pygame.display.flip() if self.refresh: self.refresh = False else: self.event.remove('options') elif self.menu == 4: # MPD Audio Outputs # Mindestens alle 5 Sekunden Aktualisieren if self.sec > 5: self.sec = 0 self.refresh = True if self.refresh or 'output' in self.event: LCD.fill(colors['bg']) if bg_buf['bg']: LCD.blit(bg_buf['bg'], (0, 0)) if skincfg['text_on_top']: skin4_base() draw_text(MSG_WIN, 'MPD Audio Outputs', fonts['std'], colors['status'], align='centerx') get_outputs() current_time = datetime.datetime.now().strftime('%d.%m.%Y %H:%M') draw_text(STATUS_WIN, current_time, fonts['std'], colors['status']) # get and display ip try: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.connect(('8.8.8.8', 0)) # dummy IP ip_text = 'IP: ' + sock.getsockname()[0] sock.close() except OSError: # <class 'OSError'>: [Errno 101] Network is unreachable exctype, value = sys.exc_info()[:2] print("Screen.update()", str(exctype) + ': ' + str(value)) ip_text = 'IP: No Network!' 
# Wlan Level wlanlevel = get_wlan_level(WLAN_DEVICE) if wlanlevel >= 80: STATUS_WIN.blit(btn["wlan100"], (STATUS_WIN.get_width() // 2 - \ btn["wlan100"].get_width() // 2, 0)) elif wlanlevel >= 55: STATUS_WIN.blit(btn["wlan075"], (STATUS_WIN.get_width() // 2 - \ btn["wlan075"].get_width() // 2, 0)) elif wlanlevel >= 30: STATUS_WIN.blit(btn["wlan050"], (STATUS_WIN.get_width() // 2 - \ btn["wlan050"].get_width() // 2, 0)) elif wlanlevel >= 5: STATUS_WIN.blit(btn["wlan025"], (STATUS_WIN.get_width() // 2 - \ btn["wlan025"].get_width() // 2, 0)) else: STATUS_WIN.blit(btn["wlan000"], (STATUS_WIN.get_width() // 2 - \ btn["wlan000"].get_width() // 2, 0)) draw_text(STATUS_WIN, ip_text, fonts['std'], colors['status'], align='topright') if not skincfg['text_on_top']: skin4_base() pygame.display.flip() if self.refresh: self.refresh = False else: self.event.remove('output') elif self.menu == 5: # NewTron-Radio Settings if self.refresh: self.refresh = False LCD.fill(colors['bg']) if bg_buf['bg']: LCD.blit(bg_buf['bg'], (0, 0)) if skincfg['text_on_top']: skin5_base() show_config() if not skincfg['text_on_top']: skin5_base() pygame.display.flip() elif self.menu == 6: # Weather Screen if self.sec > 60 or self.refresh: # Wetter jede Minute abfragen self.sec = 0 WTR.show() if not self.tick and not self.sec % 5: # Alle 5 Sekunden aktualisieren self.refresh = True if self.refresh or 'playlist' in self.event: show_ss_status() if self.refresh: self.refresh = False else: self.event.remove('playlist') else: # self.screensaver == True: if config['screensaver_mode'] == 'weather': if self.sec > 600 or self.refresh: # Wetter alle 10 Minuten abfragen self.sec = 0 WTR.show() if not self.tick and not self.sec % 5: # Alle 5 Sekunden aktualisieren self.refresh = True if self.refresh or 'playlist' in self.event: show_ss_status() if self.refresh: self.refresh = False else: self.event.remove('playlist') elif config['screensaver_mode'] == 'clock': if self.sec > 5 or self.refresh: self.sec = 0 self.refresh = False show_ss_status(big_clock=True) elif config['screensaver_mode'] == 'black': if self.sec > 600 or self.refresh: # Alle 10 Minuten aktualisieren self.sec = 0 self.refresh = False LCD.fill(colors['bg']) pygame.display.flip() else: self.screensaver = False
[ "def", "update", "(", "self", ")", ":", "self", ".", "tick", "+=", "1", "if", "self", ".", "tick", ">=", "FPS", ":", "self", ".", "tick", "=", "0", "self", ".", "sec", "+=", "1", "if", "select", "(", "[", "MPC", "]", ",", "[", "]", ",", "[", "]", ",", "0", ")", "[", "0", "]", ":", "self", ".", "event", "=", "MPC", ".", "idle", "(", ")", "print", "(", "self", ".", "event", ")", "self", ".", "dirty_rects", "=", "[", "]", "MPC", ".", "idle", "(", ")", "if", "not", "self", ".", "screensaver", ":", "if", "self", ".", "menu", "==", "1", ":", "# Main Screen", "if", "self", ".", "refresh", ":", "skin1_base", "(", ")", "if", "skincfg", "[", "'text_on_top'", "]", ":", "BTN_WIN", "[", "0", "]", ".", "blit", "(", "btn", "[", "\"Status\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# msg_frame", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "0", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "3", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "5", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "6", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "7", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "8", "]", ")", "if", "'playlist'", "in", "self", ".", "event", "or", "self", ".", "refresh", ":", "self", ".", "station", ",", "self", ".", "title", ",", "self", ".", "artist", "=", "get_info", "(", ")", "self", ".", "station_label", "=", "ScrollText", "(", "STATION_WIN", ",", "self", ".", "station", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "bg_buf", "[", "'station_bg'", "]", ",", "btn", "[", "\"Status\"", "]", ")", "self", ".", "title_label", "=", "ScrollText", "(", "TITLE_WIN", ",", "self", ".", "title", ",", "fonts", "[", "'title'", "]", ",", "(", "colors", "[", "'font'", "]", ")", ",", "bg_buf", "[", "'title_bg'", "]", ",", "btn", "[", "\"Status\"", "]", ")", "if", "not", "self", ".", "artist", ":", "self", ".", "artist", "=", "\"Now Playing:\"", "self", ".", "artist_label", "=", "ScrollText", "(", "ARTIST_WIN", ",", "self", ".", "artist", ",", "fonts", "[", "'std'", "]", ",", "(", "colors", "[", "'font'", "]", ")", ",", "bg_buf", "[", "'artist_bg'", "]", ",", "btn", "[", "\"Status\"", "]", ")", "self", ".", "dirty", "=", "True", "if", "not", "self", ".", "refresh", ":", "self", ".", "event", ".", "remove", "(", "'playlist'", ")", "if", "'player'", "in", "self", ".", "event", "or", "self", ".", "refresh", ":", "status", "=", "MPC", ".", "status", "(", ")", "BTN_WIN", "[", "1", "]", ".", "blit", "(", "bg_buf", "[", "'btn1'", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "BTN_WIN", "[", "2", "]", ".", "blit", "(", "bg_buf", "[", "'btn2'", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "if", "'state'", "in", "status", ":", "if", "status", "[", "'state'", "]", "==", "'stop'", ":", "BTN_WIN", "[", "1", "]", ".", "blit", "(", "btn", "[", "\"Play\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# Empty", "BTN_WIN", "[", "2", "]", ".", "blit", "(", "btn", "[", "\"Empty\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# Play", "elif", "status", "[", "'state'", "]", "==", "'pause'", ":", "BTN_WIN", "[", "1", "]", ".", "blit", "(", "btn", "[", "\"Stop\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# stop", "BTN_WIN", "[", "2", "]", ".", "blit", "(", "btn", "[", "\"Play\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# play", "else", ":", "BTN_WIN", "[", "1", "]", ".", "blit", "(", 
"btn", "[", "\"Stop\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# stop", "BTN_WIN", "[", "2", "]", ".", "blit", "(", "btn", "[", "\"Pause\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# pause", "self", ".", "event", "+=", "'mixer'", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "1", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "2", "]", ")", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "4", "]", ")", "self", ".", "status_update", "=", "True", "if", "not", "self", ".", "refresh", ":", "self", ".", "event", ".", "remove", "(", "'player'", ")", "if", "'mixer'", "in", "self", ".", "event", "or", "self", ".", "refresh", ":", "BTN_WIN", "[", "4", "]", ".", "blit", "(", "bg_buf", "[", "'btn4'", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "if", "self", ".", "muted", "==", "True", ":", "BTN_WIN", "[", "4", "]", ".", "blit", "(", "btn", "[", "\"Unmute\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# unmute", "else", ":", "BTN_WIN", "[", "4", "]", ".", "blit", "(", "btn", "[", "\"Mute\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# mute", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "4", "]", ")", "self", ".", "status_update", "=", "True", "if", "not", "self", ".", "refresh", ":", "self", ".", "event", ".", "remove", "(", "'mixer'", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "self", ".", "status_update", "=", "True", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "BTN_WIN", "[", "0", "]", ".", "blit", "(", "btn", "[", "\"Status\"", "]", ",", "(", "(", "0", ",", "0", ")", ")", ")", "# msg_frame", "self", ".", "dirty_rects", ".", "append", "(", "BTN_RECT", "[", "0", "]", ")", "if", "self", ".", "sec", ":", "self", ".", "sec", "=", "0", "self", ".", "status_update", "=", "True", "if", "self", ".", "status_update", ":", "self", ".", "status_update", "=", "False", "STATUS_WIN", ".", "blit", "(", "bg_buf", "[", "'status_bg'", "]", ",", "(", "0", ",", "0", ")", ")", "BITRATE_WIN", ".", "blit", "(", "bg_buf", "[", "'bitrate_bg'", "]", ",", "(", "0", ",", "0", ")", ")", "status_update", "(", ")", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "LCD", ".", "blit", "(", "btn", "[", "\"Status\"", "]", ",", "STATUS_RECT", ",", "area", "=", "STATUS_RECT", ")", "LCD", ".", "blit", "(", "btn", "[", "\"Status\"", "]", ",", "BITRATE_RECT", ",", "area", "=", "BITRATE_RECT", ")", "self", ".", "dirty_rects", ".", "append", "(", "BITRATE_RECT", ")", "self", ".", "dirty_rects", ".", "append", "(", "STATUS_RECT", ")", "pygame", ".", "display", ".", "update", "(", "self", ".", "dirty_rects", ")", "self", ".", "dirty_rects", "=", "[", "]", "self", ".", "station_label", ".", "update", "(", "self", ".", "dirty", ")", "self", ".", "title_label", ".", "update", "(", "self", ".", "dirty", ")", "self", ".", "artist_label", ".", "update", "(", "self", ".", "dirty", ")", "self", ".", "dirty", "=", "False", "elif", "self", ".", "menu", "==", "2", ":", "# Playlist Selection", "if", "'playlist'", "in", "self", ".", "event", "or", "'update'", "in", "self", ".", "event", "or", "self", ".", "refresh", ":", "LCD", ".", "fill", "(", "colors", "[", "'bg'", "]", ")", "if", "bg_buf", "[", "'bg'", "]", ":", "LCD", ".", "blit", "(", "bg_buf", "[", "'bg'", "]", ",", "(", "0", ",", "0", ")", ")", "if", "skincfg", "[", "'text_on_top'", "]", ":", "skin2_base", "(", ")", "PLS", ".", "show", "(", ")", "if", "config", "[", "'x_button'", "]", "and", "PLS", ".", 
"in_playlist", ":", "pygame", ".", "draw", ".", "rect", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "PLUS_WIN", ".", "get_rect", "(", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "7", ")", "pygame", ".", "draw", ".", "line", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "//", "4", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "*", "3", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "*", "3", "//", "4", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "8", ")", "pygame", ".", "draw", ".", "line", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "*", "3", "//", "4", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "*", "3", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "//", "4", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "8", ")", "if", "config", "[", "'plus_button'", "]", ":", "if", "PLS", ".", "lists", "[", "PLS", ".", "index", "]", "[", "0", "]", "==", "'d'", ":", "pygame", ".", "draw", ".", "rect", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "PLUS_WIN", ".", "get_rect", "(", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "7", ")", "pygame", ".", "draw", ".", "line", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "//", "2", ",", "PLUS_WIN", ".", "get_height", "(", ")", "//", "4", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "//", "2", ",", "PLUS_WIN", ".", "get_height", "(", ")", "*", "3", "//", "4", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "8", ")", "pygame", ".", "draw", ".", "line", "(", "PLUS_WIN", ",", "(", "colors", "[", "'font'", "]", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "//", "2", ")", ",", "(", "PLUS_WIN", ".", "get_width", "(", ")", "*", "3", "//", "4", ",", "PLUS_WIN", ".", "get_height", "(", ")", "//", "2", ")", ",", "PLUS_WIN", ".", "get_width", "(", ")", "//", "8", ")", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "skin2_base", "(", ")", "self", ".", "select_label", "=", "ScrollText", "(", "LIST_WIN", "[", "0", "]", ",", "re", ".", "sub", "(", "r\"\\.pls|\\.m3u\"", ",", "\"\"", ",", "PLS", ".", "lists", "[", "PLS", ".", "index", "]", "[", "1", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ")", ",", "fonts", "[", "'big'", "]", ",", "PLS", ".", "lists", "[", "PLS", ".", "index", "]", "[", "2", "]", ",", "bg_buf", "[", "'list_bg'", "]", ",", "bg_buf", "[", "'sel'", "]", ")", "pygame", ".", "display", ".", "flip", "(", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "if", "'playlist'", "in", "self", ".", "event", ":", "self", ".", "event", ".", "remove", "(", "'playlist'", ")", "if", "'update'", "in", "self", ".", "event", ":", "self", ".", "event", ".", "remove", "(", "'update'", ")", "self", ".", "select_label", ".", "update", "(", ")", "elif", "self", ".", "menu", "==", "3", ":", "# MPD Playback Settings", "if", "'options'", "in", "self", ".", "event", "or", "self", ".", "refresh", ":", "LCD", ".", "fill", "(", "colors", "[", "'bg'", "]", ")", "if", "bg_buf", "[", "'bg'", "]", ":", "LCD", ".", "blit", "(", "bg_buf", "[", "'bg'", "]", ",", "(", "0", ",", "0", ")", ")", "if", "skincfg", "[", 
"'text_on_top'", "]", ":", "skin3_base", "(", ")", "if", "self", ".", "xf_page", ":", "# Crossfade Seite", "get_xfade_state", "(", ")", "else", ":", "# Basic Settings Seite", "get_playback_state", "(", ")", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "skin3_base", "(", ")", "pygame", ".", "display", ".", "flip", "(", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "else", ":", "self", ".", "event", ".", "remove", "(", "'options'", ")", "elif", "self", ".", "menu", "==", "4", ":", "# MPD Audio Outputs", "# Mindestens alle 5 Sekunden Aktualisieren", "if", "self", ".", "sec", ">", "5", ":", "self", ".", "sec", "=", "0", "self", ".", "refresh", "=", "True", "if", "self", ".", "refresh", "or", "'output'", "in", "self", ".", "event", ":", "LCD", ".", "fill", "(", "colors", "[", "'bg'", "]", ")", "if", "bg_buf", "[", "'bg'", "]", ":", "LCD", ".", "blit", "(", "bg_buf", "[", "'bg'", "]", ",", "(", "0", ",", "0", ")", ")", "if", "skincfg", "[", "'text_on_top'", "]", ":", "skin4_base", "(", ")", "draw_text", "(", "MSG_WIN", ",", "'MPD Audio Outputs'", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "align", "=", "'centerx'", ")", "get_outputs", "(", ")", "current_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%d.%m.%Y %H:%M'", ")", "draw_text", "(", "STATUS_WIN", ",", "current_time", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ")", "# get and display ip", "try", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "sock", ".", "connect", "(", "(", "'8.8.8.8'", ",", "0", ")", ")", "# dummy IP", "ip_text", "=", "'IP: '", "+", "sock", ".", "getsockname", "(", ")", "[", "0", "]", "sock", ".", "close", "(", ")", "except", "OSError", ":", "# <class 'OSError'>: [Errno 101] Network is unreachable", "exctype", ",", "value", "=", "sys", ".", "exc_info", "(", ")", "[", ":", "2", "]", "print", "(", "\"Screen.update()\"", ",", "str", "(", "exctype", ")", "+", "': '", "+", "str", "(", "value", ")", ")", "ip_text", "=", "'IP: No Network!'", "# Wlan Level", "wlanlevel", "=", "get_wlan_level", "(", "WLAN_DEVICE", ")", "if", "wlanlevel", ">=", "80", ":", "STATUS_WIN", ".", "blit", "(", "btn", "[", "\"wlan100\"", "]", ",", "(", "STATUS_WIN", ".", "get_width", "(", ")", "//", "2", "-", "btn", "[", "\"wlan100\"", "]", ".", "get_width", "(", ")", "//", "2", ",", "0", ")", ")", "elif", "wlanlevel", ">=", "55", ":", "STATUS_WIN", ".", "blit", "(", "btn", "[", "\"wlan075\"", "]", ",", "(", "STATUS_WIN", ".", "get_width", "(", ")", "//", "2", "-", "btn", "[", "\"wlan075\"", "]", ".", "get_width", "(", ")", "//", "2", ",", "0", ")", ")", "elif", "wlanlevel", ">=", "30", ":", "STATUS_WIN", ".", "blit", "(", "btn", "[", "\"wlan050\"", "]", ",", "(", "STATUS_WIN", ".", "get_width", "(", ")", "//", "2", "-", "btn", "[", "\"wlan050\"", "]", ".", "get_width", "(", ")", "//", "2", ",", "0", ")", ")", "elif", "wlanlevel", ">=", "5", ":", "STATUS_WIN", ".", "blit", "(", "btn", "[", "\"wlan025\"", "]", ",", "(", "STATUS_WIN", ".", "get_width", "(", ")", "//", "2", "-", "btn", "[", "\"wlan025\"", "]", ".", "get_width", "(", ")", "//", "2", ",", "0", ")", ")", "else", ":", "STATUS_WIN", ".", "blit", "(", "btn", "[", "\"wlan000\"", "]", ",", "(", "STATUS_WIN", ".", "get_width", "(", ")", "//", "2", "-", "btn", "[", "\"wlan000\"", "]", ".", "get_width", "(", ")", "//", "2", ",", "0", ")", ")", "draw_text", "(", "STATUS_WIN", ",", "ip_text", ",", 
"fonts", "[", "'std'", "]", ",", "colors", "[", "'status'", "]", ",", "align", "=", "'topright'", ")", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "skin4_base", "(", ")", "pygame", ".", "display", ".", "flip", "(", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "else", ":", "self", ".", "event", ".", "remove", "(", "'output'", ")", "elif", "self", ".", "menu", "==", "5", ":", "# NewTron-Radio Settings", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "LCD", ".", "fill", "(", "colors", "[", "'bg'", "]", ")", "if", "bg_buf", "[", "'bg'", "]", ":", "LCD", ".", "blit", "(", "bg_buf", "[", "'bg'", "]", ",", "(", "0", ",", "0", ")", ")", "if", "skincfg", "[", "'text_on_top'", "]", ":", "skin5_base", "(", ")", "show_config", "(", ")", "if", "not", "skincfg", "[", "'text_on_top'", "]", ":", "skin5_base", "(", ")", "pygame", ".", "display", ".", "flip", "(", ")", "elif", "self", ".", "menu", "==", "6", ":", "# Weather Screen", "if", "self", ".", "sec", ">", "60", "or", "self", ".", "refresh", ":", "# Wetter jede Minute abfragen", "self", ".", "sec", "=", "0", "WTR", ".", "show", "(", ")", "if", "not", "self", ".", "tick", "and", "not", "self", ".", "sec", "%", "5", ":", "# Alle 5 Sekunden aktualisieren", "self", ".", "refresh", "=", "True", "if", "self", ".", "refresh", "or", "'playlist'", "in", "self", ".", "event", ":", "show_ss_status", "(", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "else", ":", "self", ".", "event", ".", "remove", "(", "'playlist'", ")", "else", ":", "# self.screensaver == True:", "if", "config", "[", "'screensaver_mode'", "]", "==", "'weather'", ":", "if", "self", ".", "sec", ">", "600", "or", "self", ".", "refresh", ":", "# Wetter alle 10 Minuten abfragen", "self", ".", "sec", "=", "0", "WTR", ".", "show", "(", ")", "if", "not", "self", ".", "tick", "and", "not", "self", ".", "sec", "%", "5", ":", "# Alle 5 Sekunden aktualisieren", "self", ".", "refresh", "=", "True", "if", "self", ".", "refresh", "or", "'playlist'", "in", "self", ".", "event", ":", "show_ss_status", "(", ")", "if", "self", ".", "refresh", ":", "self", ".", "refresh", "=", "False", "else", ":", "self", ".", "event", ".", "remove", "(", "'playlist'", ")", "elif", "config", "[", "'screensaver_mode'", "]", "==", "'clock'", ":", "if", "self", ".", "sec", ">", "5", "or", "self", ".", "refresh", ":", "self", ".", "sec", "=", "0", "self", ".", "refresh", "=", "False", "show_ss_status", "(", "big_clock", "=", "True", ")", "elif", "config", "[", "'screensaver_mode'", "]", "==", "'black'", ":", "if", "self", ".", "sec", ">", "600", "or", "self", ".", "refresh", ":", "# Alle 10 Minuten aktualisieren", "self", ".", "sec", "=", "0", "self", ".", "refresh", "=", "False", "LCD", ".", "fill", "(", "colors", "[", "'bg'", "]", ")", "pygame", ".", "display", ".", "flip", "(", ")", "else", ":", "self", ".", "screensaver", "=", "False" ]
[ 1679, 4 ]
[ 1961, 40 ]
null
python
de
['de', 'de', 'de']
True
true
null
GUI.setzen_digitale_eingaenge
(self)
Digitale Eingänge setzen
Digitale Eingänge setzen
def setzen_digitale_eingaenge(self): """ Digitale Eingänge setzen """ de_daten = [] for i in range(Konfig.DIGMAXLAENGE): de_daten = de_daten + [int(self.DE[i].get())] de_zugriff = DateiZugriff(Konfig.DIGEIN, Konfig.DIGMAXLAENGE) de_zugriff.schreiben_alle(de_daten) self.aktualisieren_eingangswerte()
[ "def", "setzen_digitale_eingaenge", "(", "self", ")", ":", "de_daten", "=", "[", "]", "for", "i", "in", "range", "(", "Konfig", ".", "DIGMAXLAENGE", ")", ":", "de_daten", "=", "de_daten", "+", "[", "int", "(", "self", ".", "DE", "[", "i", "]", ".", "get", "(", ")", ")", "]", "de_zugriff", "=", "DateiZugriff", "(", "Konfig", ".", "DIGEIN", ",", "Konfig", ".", "DIGMAXLAENGE", ")", "de_zugriff", ".", "schreiben_alle", "(", "de_daten", ")", "self", ".", "aktualisieren_eingangswerte", "(", ")" ]
[ 220, 4 ]
[ 227, 42 ]
null
python
de
['de', 'de', 'de']
True
true
null
request_multiple
(values: dict, data: StepData, name: str, save_key, ignore_testing=False)
Fragt für einen variablen Key, mehrere Male gewünschte Daten einer API ab. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :param name: Testdatei, die geladen werden soll. :param save_key: Key, unter dem die Daten gespeichert werden. :param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist.
Fragt für einen variablen Key, mehrere Male gewünschte Daten einer API ab.
def request_multiple(values: dict, data: StepData, name: str, save_key, ignore_testing=False): """Fragt für einen variablen Key, mehrere Male gewünschte Daten einer API ab. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :param name: Testdatei, die geladen werden soll. :param save_key: Key, unter dem die Daten gespeichert werden. :param ignore_testing: Ob der Request durchgeführt werden soll, obwohl testing `true` ist. """ if data.get_config("testing", False) and not ignore_testing: return _load_test_data(values, data, name, save_key) if data.get_data(values.get("use_loop_as_key", False), values, bool): data.insert_data(save_key, {}, values) for _, key in data.loop_array(values["steps_value"], values): fetch(values, data, f"{save_key}|{key}") waiting_time = data.get_data(values.get("timer_between_requests", 0.0), values, float) if waiting_time > 0.0: time.sleep(waiting_time) else: data.insert_data(save_key, [None] * len(values["steps_value"]), values) for idx, _ in data.loop_array(values["steps_value"], values): fetch(values, data, f"{save_key}|{idx}", )
[ "def", "request_multiple", "(", "values", ":", "dict", ",", "data", ":", "StepData", ",", "name", ":", "str", ",", "save_key", ",", "ignore_testing", "=", "False", ")", ":", "if", "data", ".", "get_config", "(", "\"testing\"", ",", "False", ")", "and", "not", "ignore_testing", ":", "return", "_load_test_data", "(", "values", ",", "data", ",", "name", ",", "save_key", ")", "if", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"use_loop_as_key\"", ",", "False", ")", ",", "values", ",", "bool", ")", ":", "data", ".", "insert_data", "(", "save_key", ",", "{", "}", ",", "values", ")", "for", "_", ",", "key", "in", "data", ".", "loop_array", "(", "values", "[", "\"steps_value\"", "]", ",", "values", ")", ":", "fetch", "(", "values", ",", "data", ",", "f\"{save_key}|{key}\"", ")", "waiting_time", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"timer_between_requests\"", ",", "0.0", ")", ",", "values", ",", "float", ")", "if", "waiting_time", ">", "0.0", ":", "time", ".", "sleep", "(", "waiting_time", ")", "else", ":", "data", ".", "insert_data", "(", "save_key", ",", "[", "None", "]", "*", "len", "(", "values", "[", "\"steps_value\"", "]", ")", ",", "values", ")", "for", "idx", ",", "_", "in", "data", ".", "loop_array", "(", "values", "[", "\"steps_value\"", "]", ",", "values", ")", ":", "fetch", "(", "values", ",", "data", ",", "f\"{save_key}|{idx}\"", ",", ")" ]
[ 107, 0 ]
[ 130, 54 ]
null
python
de
['de', 'de', 'de']
True
true
null
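A minimal standalone sketch of the looping pattern request_multiple implements in its index branch: each entry of steps_value is fetched under the key "<save_key>|<index>", with an optional pause between requests. StepData and fetch are project classes, so a hypothetical fetch_stub and a plain dict stand in for them here.

import time

def fetch_stub(target_key):
    print("fetching into", target_key)  # placeholder for the project's fetch()

values = {"steps_value": ["berlin", "hamburg"], "timer_between_requests": 0.5}
save_key = "_req"

for idx, _ in enumerate(values["steps_value"]):
    fetch_stub(f"{save_key}|{idx}")                   # one result per index
    if values["timer_between_requests"] > 0.0:
        time.sleep(values["timer_between_requests"])  # throttle between requests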
PdfGenerator.text
( self, text="", area:dict={}, attrs:dict={}, render=None, replaceNewLine=True, mode:str="text" )
r"""Einen Textabsatz einfügen dabei ``\n`` durch ``<br>`` ersetzen. Parameters ---------- text : str Der einzufügende Text area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet replaceNewLine : bool - True nur doppelte ``\n\n`` durch ``<br>`` ersetzten oder alle newLine ``\n`` ersetzen mode : str - text Bei angabe von `markdown` als nicht als einfachen text sondern als markdown rendern Returns ------- element_html: str HTML des erzeugten Elements
r"""Einen Textabsatz einfügen dabei ``\n`` durch ``<br>`` ersetzen.
def text( self, text="", area:dict={}, attrs:dict={}, render=None, replaceNewLine=True, mode:str="text" ): r"""Einen Textabsatz einfügen dabei ``\n`` durch ``<br>`` ersetzen. Parameters ---------- text : str Der einzufügende Text area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet replaceNewLine : bool - True nur doppelte ``\n\n`` durch ``<br>`` ersetzten oder alle newLine ``\n`` ersetzen mode : str - text Bei angabe von `markdown` als nicht als einfachen text sondern als markdown rendern Returns ------- element_html: str HTML des erzeugten Elements """ if mode == "markdown": return self.markdown( text, area, attrs, render ) else: return self._text( text, area, attrs, render, replaceNewLine )
[ "def", "text", "(", "self", ",", "text", "=", "\"\"", ",", "area", ":", "dict", "=", "{", "}", ",", "attrs", ":", "dict", "=", "{", "}", ",", "render", "=", "None", ",", "replaceNewLine", "=", "True", ",", "mode", ":", "str", "=", "\"text\"", ")", ":", "if", "mode", "==", "\"markdown\"", ":", "return", "self", ".", "markdown", "(", "text", ",", "area", ",", "attrs", ",", "render", ")", "else", ":", "return", "self", ".", "_text", "(", "text", ",", "area", ",", "attrs", ",", "render", ",", "replaceNewLine", ")" ]
[ 955, 4 ]
[ 981, 74 ]
null
python
de
['de', 'de', 'de']
True
true
null
XLSwriter.getvalue
(self)
return fd.getvalue()
Dateiinhalt direkt zurück geben.
Dateiinhalt direkt zurück geben.
def getvalue(self): """Dateiinhalt direkt zurück geben.""" fd = StringIO() self.book.save(fd) return fd.getvalue()
[ "def", "getvalue", "(", "self", ")", ":", "fd", "=", "StringIO", "(", ")", "self", ".", "book", ".", "save", "(", "fd", ")", "return", "fd", ".", "getvalue", "(", ")" ]
[ 52, 4 ]
[ 56, 28 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBaseWebApp.createDocs
( self, docs_path:str="", mode:str="build" )
return True
Dokumentation erzeugen oder erneuern. Parameters ---------- docs_path : str, optional Pfad nach ui-docs. The default is "". mode : str, optional Mit rebuild komplett neu erzeugen sonst nur erneuern. The default is "build". Returns ------- bool ``True`` wenn erzeugt wurde, sonst ``False``.
Dokumentation erzeugen oder erneuern.
def createDocs( self, docs_path:str="", mode:str="build" ): # pragma: no cover """Dokumentation erzeugen oder erneuern. Parameters ---------- docs_path : str, optional Pfad nach ui-docs. The default is "". mode : str, optional Mit rebuild komplett neu erzeugen sonst nur erneuern. The default is "build". Returns ------- bool ``True`` wenn erzeugt wurde, sonst ``False``. """ import sphinx.ext.apidoc as apidoc import sphinx.cmd.build as build if mode == "rebuild" and osp.isdir( docs_path ): from shutil import rmtree try: rmtree( docs_path ) except: return False # ohne docs_path vorlage aus helper/docs kopieren if not osp.isdir( docs_path ) or not osp.isdir( osp.join( docs_path, "build" ) ): # conf und _static kopieren from distutils.dir_util import copy_tree # vorlage kopieren # from_path = osp.join( osp.dirname(osp.abspath( __file__ )), "helper", "sphinx" ) if not osp.isdir( docs_path ): os.mkdir( docs_path ) # das soll eigentlich copy_tree machen os.mkdir( osp.join( docs_path, "source") ) os.mkdir( osp.join( docs_path, "source", "_ext") ) os.mkdir( osp.join( docs_path, "source", "_static") ) try: copy_tree( from_path, docs_path ) except: logger.debug( "ERROR copy_tree {} {}".format( from_path, docs_path ) ) print( "ERROR copy_tree {} {}".format( from_path, docs_path ) ) return False # original docs auch kopieren # org_docs_from_path = osp.join( self._config.get( "BASE_DIR", "") , 'docs' ) if osp.isdir( org_docs_from_path ): org_docs_to = osp.join( docs_path, "source", "docs" ) try: copy_tree( org_docs_from_path, org_docs_to ) except: logger.debug( "ERROR copy_tree {} {}".format( org_docs_from_path, docs_path ) ) # es wurde nichts angelegt - Fehlermeldung ausgeben if not osp.isdir( docs_path ): print("### createDocs no path", docs_path ) return False # ausführungs Pfad auf docs_path ändern os.chdir( docs_path ) # ---- 1. rst Dateien in source erzeugen api_cmd = [ '--force', # force '-osource/', # destdir '../', # module_path '../tests*', # exclude_pattern tests '../ui*' # weitere exclude_pattern ] apidoc.main( api_cmd ) # ---- 2. html aus rst Dateien in build erzeugen # # get project information from main version file import version as v build_cmd = [ 'source', 'build', '-Dcopyright={}'.format( v.__copyright__ ), '-Dauthor={}'.format( v.__author__ ), '-Dproject={}'.format( self._config.get("server.webserver.title", v.__project__) ), '-Dversion={}'.format( v.__version__ ), '-Drelease={}'.format( v.__version__ ) ] build.main( build_cmd ) return True
[ "def", "createDocs", "(", "self", ",", "docs_path", ":", "str", "=", "\"\"", ",", "mode", ":", "str", "=", "\"build\"", ")", ":", "# pragma: no cover", "import", "sphinx", ".", "ext", ".", "apidoc", "as", "apidoc", "import", "sphinx", ".", "cmd", ".", "build", "as", "build", "if", "mode", "==", "\"rebuild\"", "and", "osp", ".", "isdir", "(", "docs_path", ")", ":", "from", "shutil", "import", "rmtree", "try", ":", "rmtree", "(", "docs_path", ")", "except", ":", "return", "False", "# ohne docs_path vorlage aus helper/docs kopieren", "if", "not", "osp", ".", "isdir", "(", "docs_path", ")", "or", "not", "osp", ".", "isdir", "(", "osp", ".", "join", "(", "docs_path", ",", "\"build\"", ")", ")", ":", "# conf und _static kopieren", "from", "distutils", ".", "dir_util", "import", "copy_tree", "# vorlage kopieren", "#", "from_path", "=", "osp", ".", "join", "(", "osp", ".", "dirname", "(", "osp", ".", "abspath", "(", "__file__", ")", ")", ",", "\"helper\"", ",", "\"sphinx\"", ")", "if", "not", "osp", ".", "isdir", "(", "docs_path", ")", ":", "os", ".", "mkdir", "(", "docs_path", ")", "# das soll eigentlich copy_tree machen", "os", ".", "mkdir", "(", "osp", ".", "join", "(", "docs_path", ",", "\"source\"", ")", ")", "os", ".", "mkdir", "(", "osp", ".", "join", "(", "docs_path", ",", "\"source\"", ",", "\"_ext\"", ")", ")", "os", ".", "mkdir", "(", "osp", ".", "join", "(", "docs_path", ",", "\"source\"", ",", "\"_static\"", ")", ")", "try", ":", "copy_tree", "(", "from_path", ",", "docs_path", ")", "except", ":", "logger", ".", "debug", "(", "\"ERROR copy_tree {} {}\"", ".", "format", "(", "from_path", ",", "docs_path", ")", ")", "print", "(", "\"ERROR copy_tree {} {}\"", ".", "format", "(", "from_path", ",", "docs_path", ")", ")", "return", "False", "# original docs auch kopieren", "#", "org_docs_from_path", "=", "osp", ".", "join", "(", "self", ".", "_config", ".", "get", "(", "\"BASE_DIR\"", ",", "\"\"", ")", ",", "'docs'", ")", "if", "osp", ".", "isdir", "(", "org_docs_from_path", ")", ":", "org_docs_to", "=", "osp", ".", "join", "(", "docs_path", ",", "\"source\"", ",", "\"docs\"", ")", "try", ":", "copy_tree", "(", "org_docs_from_path", ",", "org_docs_to", ")", "except", ":", "logger", ".", "debug", "(", "\"ERROR copy_tree {} {}\"", ".", "format", "(", "org_docs_from_path", ",", "docs_path", ")", ")", "# es wurde nichts angelegt - Fehlermeldung ausgeben", "if", "not", "osp", ".", "isdir", "(", "docs_path", ")", ":", "print", "(", "\"### createDocs no path\"", ",", "docs_path", ")", "return", "False", "# ausführungs Pfad auf docs_path ändern", "os", ".", "chdir", "(", "docs_path", ")", "# ---- 1. rst Dateien in source erzeugen", "api_cmd", "=", "[", "'--force'", ",", "# force", "'-osource/'", ",", "# destdir", "'../'", ",", "# module_path", "'../tests*'", ",", "# exclude_pattern tests", "'../ui*'", "# weitere exclude_pattern", "]", "apidoc", ".", "main", "(", "api_cmd", ")", "# ---- 2. 
html aus rst Dateien in build erzeugen", "#", "# get project information from main version file", "import", "version", "as", "v", "build_cmd", "=", "[", "'source'", ",", "'build'", ",", "'-Dcopyright={}'", ".", "format", "(", "v", ".", "__copyright__", ")", ",", "'-Dauthor={}'", ".", "format", "(", "v", ".", "__author__", ")", ",", "'-Dproject={}'", ".", "format", "(", "self", ".", "_config", ".", "get", "(", "\"server.webserver.title\"", ",", "v", ".", "__project__", ")", ")", ",", "'-Dversion={}'", ".", "format", "(", "v", ".", "__version__", ")", ",", "'-Drelease={}'", ".", "format", "(", "v", ".", "__version__", ")", "]", "build", ".", "main", "(", "build_cmd", ")", "return", "True" ]
[ 792, 4 ]
[ 891, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
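The core of createDocs is two programmatic Sphinx calls; a rough sketch under the assumption that Sphinx is installed and a package directory exists (the paths below are placeholders, not the project's real layout):

import sphinx.ext.apidoc as apidoc
import sphinx.cmd.build as build

# 1) generate .rst stubs for the package into docs/source
apidoc.main(["--force", "-o", "docs/source", "mypackage"])

# 2) build HTML from docs/source into docs/build
build.main(["docs/source", "docs/build"])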
create_app
()
return app
Erstellt die Flask-Server-Instanz. Initialisiert die Serverkonfiguration und registriert alle Endpunkte. Wenn eine `config.py`-Datei im `instances`-Ordner existiert, wird diese Konfiguration benutzt. :return: Eine Instanz des Servers. :rtype: Flask
Erstellt die Flask-Server-Instanz.
def create_app(): """Erstellt die Flask-Server-Instanz. Initialisiert die Serverkonfiguration und registriert alle Endpunkte. Wenn eine `config.py`-Datei im `instances`-Ordner existiert, wird diese Konfiguration benutzt. :return: Eine Instanz des Servers. :rtype: Flask """ # create app = Flask(__name__, instance_relative_config=True, static_url_path="/") # configure the app app.config.from_mapping( SECRET_KEY='dev' ) # load the instance config, if it exists app.config.from_pyfile('config.py', silent=True) app.config['JSON_SORT_KEYS'] = False start_backend() # add js as mmetype to ensure that the content-type is correct for js files mimetypes.add_type("text/javascript", ".js") # register the blueprints app.register_blueprint(api.api, url_prefix="/visuanalytics") # serve index.html @app.route("/", methods=["GET"]) def index(): try: return render_template("index.html") except TemplateNotFound: abort(404) return app
[ "def", "create_app", "(", ")", ":", "# create", "app", "=", "Flask", "(", "__name__", ",", "instance_relative_config", "=", "True", ",", "static_url_path", "=", "\"/\"", ")", "# configure the app", "app", ".", "config", ".", "from_mapping", "(", "SECRET_KEY", "=", "'dev'", ")", "# load the instance config, if it exists", "app", ".", "config", ".", "from_pyfile", "(", "'config.py'", ",", "silent", "=", "True", ")", "app", ".", "config", "[", "'JSON_SORT_KEYS'", "]", "=", "False", "start_backend", "(", ")", "# add js as mmetype to ensure that the content-type is correct for js files", "mimetypes", ".", "add_type", "(", "\"text/javascript\"", ",", "\".js\"", ")", "# register the blueprints", "app", ".", "register_blueprint", "(", "api", ".", "api", ",", "url_prefix", "=", "\"/visuanalytics\"", ")", "# serve index.html", "@", "app", ".", "route", "(", "\"/\"", ",", "methods", "=", "[", "\"GET\"", "]", ")", "def", "index", "(", ")", ":", "try", ":", "return", "render_template", "(", "\"index.html\"", ")", "except", "TemplateNotFound", ":", "abort", "(", "404", ")", "return", "app" ]
[ 15, 0 ]
[ 54, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
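A hypothetical way to run the factory; the import path below is assumed from the blueprint prefix and is not taken from the source:

from visuanalytics.server import create_app  # assumed module path

app = create_app()

if __name__ == "__main__":
    app.run(debug=True)  # API endpoints are registered under /visuanalytics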
GeneratorGUI.exrahiere_optionen
( self)
return opt
Liste mit Dictionaries für Funktionsgeneratoroptionen erstellen
Liste mit Dictionaries für Funktionsgeneratoroptionen erstellen
def exrahiere_optionen( self): """ Liste mit Dictionaries für Funktionsgeneratoroptionen erstellen """ opt = [] for i in range(Konfig.ANAMAXLAENGE): eintrag = { "aktiviert": self.aktiviert[i].get(), "signalform":self.signalform[i].get(), "amplitude": self.amplitude[i].get(),"pdauer":self.pdauer[i].get()} opt = opt + [eintrag] return opt
[ "def", "exrahiere_optionen", "(", "self", ")", ":", "opt", "=", "[", "]", "for", "i", "in", "range", "(", "Konfig", ".", "ANAMAXLAENGE", ")", ":", "eintrag", "=", "{", "\"aktiviert\"", ":", "self", ".", "aktiviert", "[", "i", "]", ".", "get", "(", ")", ",", "\"signalform\"", ":", "self", ".", "signalform", "[", "i", "]", ".", "get", "(", ")", ",", "\"amplitude\"", ":", "self", ".", "amplitude", "[", "i", "]", ".", "get", "(", ")", ",", "\"pdauer\"", ":", "self", ".", "pdauer", "[", "i", "]", ".", "get", "(", ")", "}", "opt", "=", "opt", "+", "[", "eintrag", "]", "return", "opt" ]
[ 106, 4 ]
[ 112, 18 ]
null
python
de
['de', 'de', 'de']
True
true
null
HeistSystem.Writemessage_StakeBelowMinimum
(self, data)
return
Schreibt die Benachrichtigung über einen negativen Einsatz in den Chat
Schreibt die Benachrichtigung über einen negativen Einsatz in den Chat
def Writemessage_StakeBelowMinimum(self, data): ''' Schreibt die Benachrichtigung über einen negativen Einsatz in den Chat ''' thisActionName = "Writemessage_StakeBelowMinimum" # Benachrichtigung aus der Datenbank auslesen messageText = self.RandomMessage_ByType( messageType=self.MessageType_StakeBelowMinimum ) # Nachricht in den Chat schreiben self.chat_WriteTextMessage( messageText=str(messageText).format( user=data.UserName, minpoints=TransformLocale_Decimals( self.Settings.Game_Settings_MinStake), pointsname=self.Parent.GetCurrencyName() ) ) return
[ "def", "Writemessage_StakeBelowMinimum", "(", "self", ",", "data", ")", ":", "thisActionName", "=", "\"Writemessage_StakeBelowMinimum\"", "# Benachrichtigung aus der Datenbank auslesen\r", "messageText", "=", "self", ".", "RandomMessage_ByType", "(", "messageType", "=", "self", ".", "MessageType_StakeBelowMinimum", ")", "# Nachricht in den Chat schreiben\r", "self", ".", "chat_WriteTextMessage", "(", "messageText", "=", "str", "(", "messageText", ")", ".", "format", "(", "user", "=", "data", ".", "UserName", ",", "minpoints", "=", "TransformLocale_Decimals", "(", "self", ".", "Settings", ".", "Game_Settings_MinStake", ")", ",", "pointsname", "=", "self", ".", "Parent", ".", "GetCurrencyName", "(", ")", ")", ")", "return" ]
[ 1257, 4 ]
[ 1276, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
PdfGenerator._get_attrs
(self, attrs:dict={})
return _id, _class, _style
Erzeugt aus einem attrs Element id, class und style Angaben. Bei Angabe von class und id wird zugeordnet. Alles andere wird als style verwendet Parameters ---------- attrs : dict, optional dict mit Attributen. The default is {}. Returns ------- _id : str Die *id* Angabe aus attrs _class : str Die *class* Angabe aus attrs. _style : str Die *style* Angabe aus attrs.
Erzeugt aus einem attrs Element id, class und style Angaben.
def _get_attrs(self, attrs:dict={}): """Erzeugt aus einem attrs Element id, class und style Angaben. Bei Angabe von class und id wird zugeordnet. Alles andere wird als style verwendet Parameters ---------- attrs : dict, optional dict mit Attributen. The default is {}. Returns ------- _id : str Die *id* Angabe aus attrs _class : str Die *class* Angabe aus attrs. _style : str Die *style* Angabe aus attrs. """ _id = "" _class = "" _style = "" for key, value in attrs.items(): if key == "id": _id = value elif key == "class": _class = value else: _style += str(key) + ":" + str(value) + ";" return _id, _class, _style
[ "def", "_get_attrs", "(", "self", ",", "attrs", ":", "dict", "=", "{", "}", ")", ":", "_id", "=", "\"\"", "_class", "=", "\"\"", "_style", "=", "\"\"", "for", "key", ",", "value", "in", "attrs", ".", "items", "(", ")", ":", "if", "key", "==", "\"id\"", ":", "_id", "=", "value", "elif", "key", "==", "\"class\"", ":", "_class", "=", "value", "else", ":", "_style", "+=", "str", "(", "key", ")", "+", "\":\"", "+", "str", "(", "value", ")", "+", "\";\"", "return", "_id", ",", "_class", ",", "_style" ]
[ 724, 4 ]
[ 755, 34 ]
null
python
de
['de', 'de', 'de']
True
true
null
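The splitting rule of _get_attrs, reduced to a standalone snippet: "id" and "class" are pulled out, everything else is concatenated into an inline style string (the attrs dict is invented example data).

attrs = {"id": "box1", "class": "layout-fill-width", "width": "50mm", "color": "red"}

_id, _class, _style = "", "", ""
for key, value in attrs.items():
    if key == "id":
        _id = value
    elif key == "class":
        _class = value
    else:
        _style += str(key) + ":" + str(value) + ";"

print(_id, _class, _style)   # box1 layout-fill-width width:50mm;color:red;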
ariaClass.getMLC
( self, RadiationSer )
DynMLCPlan hat mehrere ControlPoints, deshalb nur ControlPointIndex=0 verwenden Nur die inneren 10 leafs verwenden um die Art zu bestimmen
DynMLCPlan hat mehrere ControlPoints, deshalb nur ControlPointIndex=0 verwenden Nur die inneren 10 leafs verwenden um die Art zu bestimmen
def getMLC( self, RadiationSer ): """DynMLCPlan hat mehrere ControlPoints, deshalb nur ControlPointIndex=0 verwenden Nur die inneren 10 leafs verwenden um die Art zu bestimmen """ sql = """ SELECT TOP 1000 [ControlPointSer] ,[RadiationSer] ,[ControlPointIndex] ,[ControlPointType] ,[MetersetWeight] ,[NominalEnergy] ,[CollX1] ,[CollX2] ,[CollY1] ,[CollY2] ,[GantryRtn] ,[CollRtn] ,[OffPlaneAngle] ,[PlanPosLeaf25A] AS [A-25] ,[PlanPosLeaf26A] AS [A-26] ,[PlanPosLeaf27A] AS [A-27] ,[PlanPosLeaf28A] AS [A-29] ,[PlanPosLeaf29A] AS [A-29] ,[PlanPosLeaf30A] AS [A-30] ,[PlanPosLeaf31A] AS [A-31] ,[PlanPosLeaf32A] AS [A-32] ,[PlanPosLeaf33A] AS [A-33] ,[PlanPosLeaf34A] AS [A-34] ,[PlanPosLeaf25B] AS [B-25] ,[PlanPosLeaf26B] AS [B-26] ,[PlanPosLeaf27B] AS [B-27] ,[PlanPosLeaf28B] AS [B-28] ,[PlanPosLeaf29B] AS [B-29] ,[PlanPosLeaf30B] AS [B-30] ,[PlanPosLeaf31B] AS [B-31] ,[PlanPosLeaf32B] AS [B-32] ,[PlanPosLeaf33B] AS [B-33] ,[PlanPosLeaf34B] AS [B-34] ,[PatientSupportAngle] ,[IsoCenterPositionX] ,[IsoCenterPositionY] ,[IsoCenterPositionZ] FROM [variansystem].[dbo].[ControlPoint] WHERE [ControlPointIndex] = 0 AND RadiationSer = {rs:d} """ mlc = self.execute( sql.format( rs=RadiationSer) ) if len(mlc) > 0: return mlc[0] else: return None
[ "def", "getMLC", "(", "self", ",", "RadiationSer", ")", ":", "sql", "=", "\"\"\"\nSELECT TOP 1000 [ControlPointSer]\n ,[RadiationSer]\n ,[ControlPointIndex]\n ,[ControlPointType]\n ,[MetersetWeight]\n ,[NominalEnergy]\n ,[CollX1]\n ,[CollX2]\n ,[CollY1]\n ,[CollY2]\n ,[GantryRtn]\n ,[CollRtn]\n ,[OffPlaneAngle]\n ,[PlanPosLeaf25A] AS [A-25]\n ,[PlanPosLeaf26A] AS [A-26]\n ,[PlanPosLeaf27A] AS [A-27]\n ,[PlanPosLeaf28A] AS [A-29]\n ,[PlanPosLeaf29A] AS [A-29]\n ,[PlanPosLeaf30A] AS [A-30]\n ,[PlanPosLeaf31A] AS [A-31]\n ,[PlanPosLeaf32A] AS [A-32]\n ,[PlanPosLeaf33A] AS [A-33]\n ,[PlanPosLeaf34A] AS [A-34]\n ,[PlanPosLeaf25B] AS [B-25]\n ,[PlanPosLeaf26B] AS [B-26]\n ,[PlanPosLeaf27B] AS [B-27]\n ,[PlanPosLeaf28B] AS [B-28]\n ,[PlanPosLeaf29B] AS [B-29]\n ,[PlanPosLeaf30B] AS [B-30]\n ,[PlanPosLeaf31B] AS [B-31]\n ,[PlanPosLeaf32B] AS [B-32]\n ,[PlanPosLeaf33B] AS [B-33]\n ,[PlanPosLeaf34B] AS [B-34]\n ,[PatientSupportAngle]\n ,[IsoCenterPositionX]\n ,[IsoCenterPositionY]\n ,[IsoCenterPositionZ]\n FROM [variansystem].[dbo].[ControlPoint]\n WHERE [ControlPointIndex] = 0\n AND RadiationSer = {rs:d}\n \"\"\"", "mlc", "=", "self", ".", "execute", "(", "sql", ".", "format", "(", "rs", "=", "RadiationSer", ")", ")", "if", "len", "(", "mlc", ")", ">", "0", ":", "return", "mlc", "[", "0", "]", "else", ":", "return", "None" ]
[ 67, 4 ]
[ 118, 23 ]
null
python
de
['de', 'de', 'de']
True
true
null
split_string
(values: dict, data: StepData)
Teilt einen String am angegebenen Trennzeichen. Das Trennzeichen können auch mehrere Zeichen sein. Soll die Groß- und Kleinschreibung des Trennzeichens (delimiter) ignoriert werden, setzte `ignore_case` auf `true`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Teilt einen String am angegebenen Trennzeichen.
def split_string(values: dict, data: StepData): """Teilt einen String am angegebenen Trennzeichen. Das Trennzeichen können auch mehrere Zeichen sein. Soll die Groß- und Kleinschreibung des Trennzeichens (delimiter) ignoriert werden, setzte `ignore_case` auf `true`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) delimiter = data.format(values.get("delimiter", " "), values) new_key = get_new_keys(values, idx) if data.get_data(values.get("ignore_case", False), values, bool): new_value = re.split(delimiter, value, flags=re.IGNORECASE) else: new_value = re.split(delimiter, value) data.insert_data(new_key, new_value, values)
[ "def", "split_string", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "delimiter", "=", "data", ".", "format", "(", "values", ".", "get", "(", "\"delimiter\"", ",", "\" \"", ")", ",", "values", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "if", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"ignore_case\"", ",", "False", ")", ",", "values", ",", "bool", ")", ":", "new_value", "=", "re", ".", "split", "(", "delimiter", ",", "value", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "else", ":", "new_value", "=", "re", ".", "split", "(", "delimiter", ",", "value", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 748, 0 ]
[ 766, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
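The heart of split_string is re.split, so the delimiter is interpreted as a regular expression; with ignore_case it is matched case-insensitively:

import re

print(re.split("x", "1x2X3"))                        # ['1', '2X3']
print(re.split("x", "1x2X3", flags=re.IGNORECASE))   # ['1', '2', '3']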
Geometry.copy_line
(self, center, radius, start_angle, end_angle, start_line, end_line, inner_circle, outer_circle, e, rtol=1e-04, atol=1e-04, points_inner=None, points_outer=None)
return new_elements
Die Funktion kopiert die Teile einer Linie, welche sich in der durch die Parameter definierten Teilkreisfläche befinden.
Die Funktion kopiert die Teile einer Linie, welche sich in der durch die Parameter definierten Teilkreisfläche befinden.
def copy_line(self, center, radius, start_angle, end_angle, start_line, end_line, inner_circle, outer_circle, e, rtol=1e-04, atol=1e-04, points_inner=None, points_outer=None): """ Die Funktion kopiert die Teile einer Linie, welche sich in der durch die Parameter definierten Teilkreisfläche befinden. """ assert(isinstance(e, Line)) if is_same_angle(start_angle, end_angle): pts_inner = inner_circle.intersect_line(e, rtol, atol, False) pts_outer = outer_circle.intersect_line(e, rtol, atol, False) points = pts_inner + pts_outer + [e.p2] else: pts_start = e.intersect_line(start_line, rtol, atol, False) pts_end = e.intersect_line(end_line, rtol, atol, False) pts_inner = inner_circle.intersect_line(e, rtol, atol, False) pts_outer = outer_circle.intersect_line(e, rtol, atol, False) points = pts_start + pts_end + \ pts_inner + pts_outer + [e.p2] if points_inner is not None and pts_inner: points_inner += pts_inner if points_outer is not None and pts_outer: points_outer += pts_outer new_elements = [] sorted_points = [] for p in points: dist = distance(e.p1, p) sorted_points.append((dist, p)) sorted_points.sort() p1 = e.p1 for x, p2 in sorted_points: pm = middle_point_of_line(p1, p2) if is_point_inside_region(pm, center, inner_circle.radius, outer_circle.radius, start_angle, end_angle): new_elements.append(Line(Element(start=p1, end=p2))) p1 = p2 return new_elements
[ "def", "copy_line", "(", "self", ",", "center", ",", "radius", ",", "start_angle", ",", "end_angle", ",", "start_line", ",", "end_line", ",", "inner_circle", ",", "outer_circle", ",", "e", ",", "rtol", "=", "1e-04", ",", "atol", "=", "1e-04", ",", "points_inner", "=", "None", ",", "points_outer", "=", "None", ")", ":", "assert", "(", "isinstance", "(", "e", ",", "Line", ")", ")", "if", "is_same_angle", "(", "start_angle", ",", "end_angle", ")", ":", "pts_inner", "=", "inner_circle", ".", "intersect_line", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "pts_outer", "=", "outer_circle", ".", "intersect_line", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "points", "=", "pts_inner", "+", "pts_outer", "+", "[", "e", ".", "p2", "]", "else", ":", "pts_start", "=", "e", ".", "intersect_line", "(", "start_line", ",", "rtol", ",", "atol", ",", "False", ")", "pts_end", "=", "e", ".", "intersect_line", "(", "end_line", ",", "rtol", ",", "atol", ",", "False", ")", "pts_inner", "=", "inner_circle", ".", "intersect_line", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "pts_outer", "=", "outer_circle", ".", "intersect_line", "(", "e", ",", "rtol", ",", "atol", ",", "False", ")", "points", "=", "pts_start", "+", "pts_end", "+", "pts_inner", "+", "pts_outer", "+", "[", "e", ".", "p2", "]", "if", "points_inner", "is", "not", "None", "and", "pts_inner", ":", "points_inner", "+=", "pts_inner", "if", "points_outer", "is", "not", "None", "and", "pts_outer", ":", "points_outer", "+=", "pts_outer", "new_elements", "=", "[", "]", "sorted_points", "=", "[", "]", "for", "p", "in", "points", ":", "dist", "=", "distance", "(", "e", ".", "p1", ",", "p", ")", "sorted_points", ".", "append", "(", "(", "dist", ",", "p", ")", ")", "sorted_points", ".", "sort", "(", ")", "p1", "=", "e", ".", "p1", "for", "x", ",", "p2", "in", "sorted_points", ":", "pm", "=", "middle_point_of_line", "(", "p1", ",", "p2", ")", "if", "is_point_inside_region", "(", "pm", ",", "center", ",", "inner_circle", ".", "radius", ",", "outer_circle", ".", "radius", ",", "start_angle", ",", "end_angle", ")", ":", "new_elements", ".", "append", "(", "Line", "(", "Element", "(", "start", "=", "p1", ",", "end", "=", "p2", ")", ")", ")", "p1", "=", "p2", "return", "new_elements" ]
[ 1561, 4 ]
[ 1624, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
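A simplified standalone sketch of the segmenting idea in copy_line: intersection points are sorted by their distance from the start point, and only sub-segments whose midpoint lies inside the region are kept (the region test below is a dummy predicate, not the real is_point_inside_region):

import math

def distance(a, b):
    return math.hypot(b[0] - a[0], b[1] - a[1])

def midpoint(a, b):
    return ((a[0] + b[0]) / 2, (a[1] + b[1]) / 2)

def inside(p):                 # stand-in for is_point_inside_region
    return p[0] >= 1.0

p1, p2 = (0.0, 0.0), (4.0, 0.0)
cuts = [(2.0, 0.0), (1.0, 0.0)]                        # intersection points
pts = sorted(cuts + [p2], key=lambda p: distance(p1, p))

segments, start = [], p1
for end in pts:
    if inside(midpoint(start, end)):
        segments.append((start, end))
    start = end

print(segments)   # [((1.0, 0.0), (2.0, 0.0)), ((2.0, 0.0), (4.0, 0.0))]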
PdfGenerator.set_class_defaults
(self)
Setzt default Werte für die Klasse. alle _variables Angaben können in config.json im Abschnitt variables überschrieben werden
Setzt default Werte für die Klasse.
def set_class_defaults(self): """Setzt default Werte für die Klasse. alle _variables Angaben können in config.json im Abschnitt variables überschrieben werden """ self.OVERLAY_STYLE = '' self.PAGE_STYLE = '' self.template = """ <!doctype html> <html> <head> <title>{title}</title> <meta charset="utf-8"> {meta} <style>{style}</style> {head} </head> <body> {body} </body> </html> """ self.title = '{{Titel}} - {{Betreff}}' self.meta = '' self.style = 'body{ padding:5mm 0 5mm 0; }' self.head = '' self.header_html = '' self.footer_html = '' self._variables = { "page" : { "size" : "A4 portrait", # a4 = 210x297 width=172.5 (soll 180) "left" : 20, # LibreOffice Vorlage ISP 22.5, "right" : 9, # LibreOffice Vorlage ISP 15.0, "top" : 7.5, "bottom" : 6.0, "header" : 12, "footer": 5, "header-margin": 4, "footer-margin": 2 }, "resources": "{{BASE_DIR}}/resources", # Pfad zu den resources mit CSS und Fonts Angaben MPDF verwendet font format("svg") "logo": "logo.png", "path" : "{{BASE_DIR}}/files", "filename": "noname.pdf", "Klinik" : "", "Abteilung" : "", "Titel" : "", "Betreff" : "", "Schlüsselwörter" : "", "Kommentare" : "", "Datenausgabe" : "{{ now.strftime('%d.%m.%Y') }}", "Erstelldatum" :"", "Erstellt_von" : "", "Geprüft_von" : "", "Gültig_ab" : "", "Version" : "", "Freigegeben_von" : "" } # defaults setzen self.pageContent = {} self.contentName = "_" self.overlayContent = {} self.autoRender = True self.pandas_table_id = 0 self.isRendered = False
[ "def", "set_class_defaults", "(", "self", ")", ":", "self", ".", "OVERLAY_STYLE", "=", "''", "self", ".", "PAGE_STYLE", "=", "''", "self", ".", "template", "=", "\"\"\"\n <!doctype html>\n <html>\n <head>\n <title>{title}</title>\n <meta charset=\"utf-8\">\n {meta}\n <style>{style}</style>\n {head}\n </head>\n <body>\n {body}\n </body>\n </html>\n \"\"\"", "self", ".", "title", "=", "'{{Titel}} - {{Betreff}}'", "self", ".", "meta", "=", "''", "self", ".", "style", "=", "'body{ padding:5mm 0 5mm 0; }'", "self", ".", "head", "=", "''", "self", ".", "header_html", "=", "''", "self", ".", "footer_html", "=", "''", "self", ".", "_variables", "=", "{", "\"page\"", ":", "{", "\"size\"", ":", "\"A4 portrait\"", ",", "# a4 = 210x297 width=172.5 (soll 180)", "\"left\"", ":", "20", ",", "# LibreOffice Vorlage ISP 22.5,", "\"right\"", ":", "9", ",", "# LibreOffice Vorlage ISP 15.0,", "\"top\"", ":", "7.5", ",", "\"bottom\"", ":", "6.0", ",", "\"header\"", ":", "12", ",", "\"footer\"", ":", "5", ",", "\"header-margin\"", ":", "4", ",", "\"footer-margin\"", ":", "2", "}", ",", "\"resources\"", ":", "\"{{BASE_DIR}}/resources\"", ",", "# Pfad zu den resources mit CSS und Fonts Angaben MPDF verwendet font format(\"svg\")", "\"logo\"", ":", "\"logo.png\"", ",", "\"path\"", ":", "\"{{BASE_DIR}}/files\"", ",", "\"filename\"", ":", "\"noname.pdf\"", ",", "\"Klinik\"", ":", "\"\"", ",", "\"Abteilung\"", ":", "\"\"", ",", "\"Titel\"", ":", "\"\"", ",", "\"Betreff\"", ":", "\"\"", ",", "\"Schlüsselwörter\" :", "\"", ",", "", "\"Kommentare\"", ":", "\"\"", ",", "\"Datenausgabe\"", ":", "\"{{ now.strftime('%d.%m.%Y') }}\"", ",", "\"Erstelldatum\"", ":", "\"\"", ",", "\"Erstellt_von\"", ":", "\"\"", ",", "\"Geprüft_von\" ", " ", "\",", "", "\"Gültig_ab\" ", " ", "\",", "", "\"Version\"", ":", "\"\"", ",", "\"Freigegeben_von\"", ":", "\"\"", "}", "# defaults setzen", "self", ".", "pageContent", "=", "{", "}", "self", ".", "contentName", "=", "\"_\"", "self", ".", "overlayContent", "=", "{", "}", "self", ".", "autoRender", "=", "True", "self", ".", "pandas_table_id", "=", "0", "self", ".", "isRendered", "=", "False" ]
[ 232, 4 ]
[ 301, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispDicom.archive_loadSOPInstanceUID
( self, SOPInstanceUID )
return ds
Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv Parameters ---------- SOPInstanceUID : str Eine SOPInstanceUID. Returns ------- ds : TYPE DESCRIPTION.
Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv
def archive_loadSOPInstanceUID( self, SOPInstanceUID ): """Lädt eine Dicomdatei mit SOPInstanceUID aus dem Archiv Parameters ---------- SOPInstanceUID : str Eine SOPInstanceUID. Returns ------- ds : TYPE DESCRIPTION. """ ds = None exists, filename = self.archive_hasSOPInstanceUID( SOPInstanceUID ) if exists: try: # mit force True einlesen um trotz fehlender headerdaten einzulesen ds = dcmread(filename, force=True) except: # pragma: no cover # alle sonstigen Fehler abfangen logger.error("Fehler beim lesen der DICOM Datei") pass return ds
[ "def", "archive_loadSOPInstanceUID", "(", "self", ",", "SOPInstanceUID", ")", ":", "ds", "=", "None", "exists", ",", "filename", "=", "self", ".", "archive_hasSOPInstanceUID", "(", "SOPInstanceUID", ")", "if", "exists", ":", "try", ":", "# mit force True einlesen um trotz fehlender headerdaten einzulesen", "ds", "=", "dcmread", "(", "filename", ",", "force", "=", "True", ")", "except", ":", "# pragma: no cover", "# alle sonstigen Fehler abfangen", "logger", ".", "error", "(", "\"Fehler beim lesen der DICOM Datei\"", ")", "pass", "return", "ds" ]
[ 1074, 4 ]
[ 1101, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
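The robust read used here is pydicom's dcmread with force=True, which accepts files even without a complete DICOM header; a small sketch with a placeholder file name:

from pydicom import dcmread

try:
    ds = dcmread("some_file.dcm", force=True)   # placeholder path
    print(ds.SOPInstanceUID)
except (OSError, AttributeError) as err:        # file missing or tag absent
    print("Fehler beim Lesen der DICOM Datei:", err)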
open_temp_resource
(path: str, pipeline_id: str, mode: str = "rt")
return open_resource(os.path.join(TEMP_LOCATION, pipeline_id, path), mode)
Öffnet die übergebene Temp-Ressource. Verwendet :func:`get_temp_resource_path` :param pipeline_id: id der Pipeline von der die Funktion aufgerufen wurde. :param path: Pfad zur Resource, relativ zum `resources`-Ordner. :param mode: Mode zum Öffnen der Datei siehe :func:`open`.
Öffnet die übergebene Temp-Ressource.
def open_temp_resource(path: str, pipeline_id: str, mode: str = "rt"): """Öffnet die übergebene Temp-Ressource. Verwendet :func:`get_temp_resource_path` :param pipeline_id: id der Pipeline von der die Funktion aufgerufen wurde. :param path: Pfad zur Resource, relativ zum `resources`-Ordner. :param mode: Mode zum Öffnen der Datei siehe :func:`open`. """ return open_resource(os.path.join(TEMP_LOCATION, pipeline_id, path), mode)
[ "def", "open_temp_resource", "(", "path", ":", "str", ",", "pipeline_id", ":", "str", ",", "mode", ":", "str", "=", "\"rt\"", ")", ":", "return", "open_resource", "(", "os", ".", "path", ".", "join", "(", "TEMP_LOCATION", ",", "pipeline_id", ",", "path", ")", ",", "mode", ")" ]
[ 266, 0 ]
[ 276, 78 ]
null
python
de
['de', 'de', 'de']
True
true
null
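A hypothetical call, assuming the surrounding resources module is importable (the import path and the pipeline id below are made up for illustration):

from visuanalytics.util.resources import open_temp_resource  # assumed import path

with open_temp_resource("data.json", "abc123") as src:   # "abc123" = made-up pipeline id
    payload = src.read()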
convert_character
(string)
return "".join(result)
Konvertierung von nicht-numerischen Zeichen in einer IBAN
Konvertierung von nicht-numerischen Zeichen in einer IBAN
def convert_character(string): """Konvertierung von nicht-numerischen Zeichen in einer IBAN""" result = [] for character in string: if character.isdigit(): result.append(character) else: result.append(str(ascii_uppercase.index(character) + 10)) return "".join(result)
[ "def", "convert_character", "(", "string", ")", ":", "result", "=", "[", "]", "for", "character", "in", "string", ":", "if", "character", ".", "isdigit", "(", ")", ":", "result", ".", "append", "(", "character", ")", "else", ":", "result", ".", "append", "(", "str", "(", "ascii_uppercase", ".", "index", "(", "character", ")", "+", "10", ")", ")", "return", "\"\"", ".", "join", "(", "result", ")" ]
[ 12, 0 ]
[ 21, 26 ]
null
python
de
['de', 'de', 'de']
True
true
null
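convert_character maps letters to the two-digit numbers 10..35 (A=10, B=11, ...) and leaves digits untouched, the usual preparation before the mod-97 check of an IBAN; a self-contained version:

from string import ascii_uppercase

def convert_character(string):
    return "".join(c if c.isdigit() else str(ascii_uppercase.index(c) + 10)
                   for c in string)

print(convert_character("DE21"))   # '131421'  (D -> 13, E -> 14)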
TNavigator._go
(self, distance)
Bewegt die Turtle um distance nach vorne
Bewegt die Turtle um distance nach vorne
def _go(self, distance): """Bewegt die Turtle um distance nach vorne""" ende = self._position + self._orient * distance self._goto(ende)
[ "def", "_go", "(", "self", ",", "distance", ")", ":", "ende", "=", "self", ".", "_position", "+", "self", ".", "_orient", "*", "distance", "self", ".", "_goto", "(", "ende", ")" ]
[ 1128, 4 ]
[ 1131, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
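The move in _go is simply "new position = position + orientation * distance"; the same arithmetic with plain tuples instead of the turtle's vector type:

import math

position = (0.0, 0.0)
heading = math.radians(30.0)
orient = (math.cos(heading), math.sin(heading))   # unit vector of the current heading

distance = 10.0
position = (position[0] + orient[0] * distance,
            position[1] + orient[1] * distance)
print(position)   # roughly (8.66, 5.0)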
close_con_f
(e=None)
Schließt DB-Verbindung innerhalb des Flask-Kontexts.
Schließt DB-Verbindung innerhalb des Flask-Kontexts.
def close_con_f(e=None): """ Schließt DB-Verbindung innerhalb des Flask-Kontexts. """ db = flask.g.pop('db', None) if db is not None: db.close()
[ "def", "close_con_f", "(", "e", "=", "None", ")", ":", "db", "=", "flask", ".", "g", ".", "pop", "(", "'db'", ",", "None", ")", "if", "db", "is", "not", "None", ":", "db", ".", "close", "(", ")" ]
[ 40, 0 ]
[ 45, 18 ]
null
python
de
['de', 'de', 'de']
True
true
null
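close_con_f matches the usual Flask teardown pattern for a per-request database handle kept in flask.g; a minimal registration sketch (connection setup omitted):

import flask

app = flask.Flask(__name__)

def close_con_f(e=None):
    db = flask.g.pop('db', None)   # handle stored earlier during the request
    if db is not None:
        db.close()

app.teardown_appcontext(close_con_f)   # invoked whenever the app context ends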
DicomImage.getFieldRoi
( self )
return self.array[ da["Y1"]:da["Y2"], da["X1"]:da["X2"] ]
holt region of interest der Feldgröße aus image.array
holt region of interest der Feldgröße aus image.array
def getFieldRoi( self ): """ holt region of interest der Feldgröße aus image.array """ da = self.getFieldDots( ) return self.array[ da["Y1"]:da["Y2"], da["X1"]:da["X2"] ]
[ "def", "getFieldRoi", "(", "self", ")", ":", "da", "=", "self", ".", "getFieldDots", "(", ")", "return", "self", ".", "array", "[", "da", "[", "\"Y1\"", "]", ":", "da", "[", "\"Y2\"", "]", ",", "da", "[", "\"X1\"", "]", ":", "da", "[", "\"X2\"", "]", "]" ]
[ 635, 4 ]
[ 640, 65 ]
null
python
de
['de', 'de', 'de']
True
true
null
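getFieldRoi is a plain numpy slice, rows Y1..Y2 and columns X1..X2 of the image array; with made-up corner pixels:

import numpy as np

array = np.arange(100).reshape(10, 10)
da = {"X1": 2, "X2": 5, "Y1": 3, "Y2": 7}         # hypothetical field corners
roi = array[da["Y1"]:da["Y2"], da["X1"]:da["X2"]]
print(roi.shape)   # (4, 3)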
translate
(values: dict, data: StepData)
Setzt den Wert eines Keys zu einem neuen Key als Wert für die JSON. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Setzt den Wert eines Keys zu einem neuen Key als Wert für die JSON.
def translate(values: dict, data: StepData): """Setzt den Wert eines Keys zu einem neuen Key als Wert für die JSON. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ for idx, key in data.loop_key(values["keys"], values): value = str(data.get_data(key, values)) new_key = get_new_keys(values, idx) translation = data.get_data(values["dict"], values, dict) new_value = data.format(translation[value], values) data.insert_data(new_key, new_value, values)
[ "def", "translate", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "str", "(", "data", ".", "get_data", "(", "key", ",", "values", ")", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "translation", "=", "data", ".", "get_data", "(", "values", "[", "\"dict\"", "]", ",", "values", ",", "dict", ")", "new_value", "=", "data", ".", "format", "(", "translation", "[", "value", "]", ",", "values", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 225, 0 ]
[ 237, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
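The mapping step inside translate, reduced to plain Python: the current value is looked up in the configured dict and stored under a new key (the weather strings are invented example data):

translation = {"clear-day": "sonnig", "rain": "regnerisch"}
record = {"icon": "rain"}

record["icon_text"] = translation[str(record["icon"])]
print(record)   # {'icon': 'rain', 'icon_text': 'regnerisch'}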
DateiZugriff.__init__
(self, dateiname, max_laenge)
Konstruktor des Datenzugriffs auf die Austauschdateien
Konstruktor des Datenzugriffs auf die Austauschdateien
def __init__(self, dateiname, max_laenge): """ Konstruktor des Datenzugriffs auf die Austauschdateien """ self.dateiname = dateiname self.max_laenge = max_laenge
[ "def", "__init__", "(", "self", ",", "dateiname", ",", "max_laenge", ")", ":", "self", ".", "dateiname", "=", "dateiname", "self", ".", "max_laenge", "=", "max_laenge" ]
[ 15, 4 ]
[ 18, 36 ]
null
python
de
['de', 'de', 'de']
True
true
null
retry_on_failure
(retries=10)
return retry_function
Decorator zum Errorhandling beim Ausführen einer Methode im Loop. Timeout's, wie beispiel bei Serverüberlastung, werden ignoriert. :param retries: Anzahl der Wiederholungsversuche, bevor abgebrochen wird. :return:
Decorator zum Errorhandling beim Ausführen einer Methode im Loop. Timeout's, wie beispiel bei Serverüberlastung, werden ignoriert.
def retry_on_failure(retries=10): """Decorator zum Errorhandling beim Ausführen einer Methode im Loop. Timeout's, wie beispiel bei Serverüberlastung, werden ignoriert. :param retries: Anzahl der Wiederholungsversuche, bevor abgebrochen wird. :return: """ def retry_function(function): def wrapper(self, *args, **kwargs): total_rounds = retries rounds = total_rounds while rounds > 0: r = total_rounds - rounds + 1 try: return function(self, *args, **kwargs) except (TimeoutError, ReadTimeout): # ein Timeout-Error kann passieren, # wenn die Server überlastet sind sind # hier erfolgt ein Timeout-Error meist, # wenn die Cookies abgelaufen sind self.log.error("Timeout exception raised", prefix=function.__name__) if function.__name__ != "renew_cookies": self.renew_cookies() except (ConnectTimeout, ConnectionError): # Keine Internetverbindung self.log.error("Connection exception | Es besteht keine Internetverbindung," "erneuter Versuch in 30 Sekunden", prefix=function.__name__) time.sleep(30) except JSONDecodeError: # die API gibt eine nicht-JSON-Response, # wenn die IP (temporär) gebannt ist, oder die Website # sich im Wartungsmodus befindet self.log.error("JSON parsing exception | IP gebannt oder Website down, " "erneuter Versuch in 30 Sekunden", prefix=function.__name__) time.sleep(30) # Cookies erneuern bei der Terminsuche if function.__name__ == "terminsuche": self.renew_cookies() except Exception as e: exc = type(e).__name__ self.log.error(f"{exc} exception raised - retry {r}", prefix=function.__name__) if rounds == 1: err = "\n".join( x.strip() for x in traceback.format_exc().splitlines()[-3:]) self.log.error(err) return False rounds -= 1 return False return wrapper return retry_function
[ "def", "retry_on_failure", "(", "retries", "=", "10", ")", ":", "def", "retry_function", "(", "function", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "total_rounds", "=", "retries", "rounds", "=", "total_rounds", "while", "rounds", ">", "0", ":", "r", "=", "total_rounds", "-", "rounds", "+", "1", "try", ":", "return", "function", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "(", "TimeoutError", ",", "ReadTimeout", ")", ":", "# ein Timeout-Error kann passieren,", "# wenn die Server überlastet sind sind", "# hier erfolgt ein Timeout-Error meist,", "# wenn die Cookies abgelaufen sind", "self", ".", "log", ".", "error", "(", "\"Timeout exception raised\"", ",", "prefix", "=", "function", ".", "__name__", ")", "if", "function", ".", "__name__", "!=", "\"renew_cookies\"", ":", "self", ".", "renew_cookies", "(", ")", "except", "(", "ConnectTimeout", ",", "ConnectionError", ")", ":", "# Keine Internetverbindung", "self", ".", "log", ".", "error", "(", "\"Connection exception | Es besteht keine Internetverbindung,\"", "\"erneuter Versuch in 30 Sekunden\"", ",", "prefix", "=", "function", ".", "__name__", ")", "time", ".", "sleep", "(", "30", ")", "except", "JSONDecodeError", ":", "# die API gibt eine nicht-JSON-Response,", "# wenn die IP (temporär) gebannt ist, oder die Website", "# sich im Wartungsmodus befindet", "self", ".", "log", ".", "error", "(", "\"JSON parsing exception | IP gebannt oder Website down, \"", "\"erneuter Versuch in 30 Sekunden\"", ",", "prefix", "=", "function", ".", "__name__", ")", "time", ".", "sleep", "(", "30", ")", "# Cookies erneuern bei der Terminsuche", "if", "function", ".", "__name__", "==", "\"terminsuche\"", ":", "self", ".", "renew_cookies", "(", ")", "except", "Exception", "as", "e", ":", "exc", "=", "type", "(", "e", ")", ".", "__name__", "self", ".", "log", ".", "error", "(", "f\"{exc} exception raised - retry {r}\"", ",", "prefix", "=", "function", ".", "__name__", ")", "if", "rounds", "==", "1", ":", "err", "=", "\"\\n\"", ".", "join", "(", "x", ".", "strip", "(", ")", "for", "x", "in", "traceback", ".", "format_exc", "(", ")", ".", "splitlines", "(", ")", "[", "-", "3", ":", "]", ")", "self", ".", "log", ".", "error", "(", "err", ")", "return", "False", "rounds", "-=", "1", "return", "False", "return", "wrapper", "return", "retry_function" ]
[ 16, 0 ]
[ 79, 25 ]
null
python
de
['de', 'de', 'de']
True
true
null
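A reduced standalone version of the retry idea (without the cookie handling and the exception-specific branches): retry a bounded number of times and return False after the last attempt.

import time

def retry_on_failure(retries=3, delay=0.0):
    def retry_function(function):
        def wrapper(*args, **kwargs):
            for attempt in range(1, retries + 1):
                try:
                    return function(*args, **kwargs)
                except Exception as err:
                    print(f"attempt {attempt} failed: {err}")
                    time.sleep(delay)
            return False
        return wrapper
    return retry_function

@retry_on_failure(retries=3)
def flaky():
    raise TimeoutError("simulated timeout")

print(flaky())   # False, after three logged attempts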
Jahr.__repr__
(self)
return f"<Jahr {self.jahr} {self.zeitrechnung.kuerzel}>"
Ein Representations-String fuer das Objekt
Ein Representations-String fuer das Objekt
def __repr__(self): """Ein Representations-String fuer das Objekt""" return f"<Jahr {self.jahr} {self.zeitrechnung.kuerzel}>"
[ "def", "__repr__", "(", "self", ")", ":", "return", "f\"<Jahr {self.jahr} {self.zeitrechnung.kuerzel}>\"" ]
[ 26, 4 ]
[ 28, 64 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBase.__init__
( self, config=None, variables={}, dicomData={} )
Initialisiert die Klasse und stellt icons und pdf bereit Parameters ---------- config: Die aktuelle config variables : TYPE, optional Metadaten aus config.metadata. The default is {}. dicomData : TYPE, optional Zu bearbeitende Dicomdaten. The default is {}. Returns ------- None.
Initialisiert die Klasse und stellt icons und pdf bereit
def __init__( self, config=None, variables={}, dicomData={} ): """Initialisiert die Klasse und stellt icons und pdf bereit Parameters ---------- config: Die aktuelle config variables : TYPE, optional Metadaten aus config.metadata. The default is {}. dicomData : TYPE, optional Zu bearbeitende Dicomdaten. The default is {}. Returns ------- None. """ #self.pdf: str = None self.fileCount = 0 self._config = config # mit einer Kopie der Metadaten als DotMap arbeiten self.metadata = dict_merge( DotMap( { "manual": { "filename": variables.testConfig.info["anleitung"], "attrs": {"class":"layout-fill-width"}, }, "plotImage": { }, "plotImage_pdf": {}, "evaluation_table_pdf": {}, "tolerance_pdf": {} }), variables.testConfig.copy() ) self.dicomData: dict = dicomData # ispCore initialisieren #ispCore.__init__( self ) # pdf erstellung bereitstellen self.pdf = ispPdf( variables=variables, config=self._config ) # print("_variables", self.pdf._variables ) # kleine icons size=x1 für result Tabelle bereitstellen self.icons: dict = { 5: self.pdf.resultIcon( 5, iconOnly=True, size="x1", addClass="tableIcon" ), # ok 4: self.pdf.resultIcon( 4, iconOnly=True, size="x1", addClass="tableIcon" ), # Good 3: self.pdf.resultIcon( 3, iconOnly=True, size="x1", addClass="tableIcon" ), # warning 2: self.pdf.resultIcon( 2, iconOnly=True, size="x1", addClass="tableIcon" ), # Insufficient 1: self.pdf.resultIcon( 1, iconOnly=True, size="x1", addClass="tableIcon" ), # error 0: np.nan, # nicht durchgeführt "nan": np.nan, # nicht durchgeführt 999 : np.nan # nicht durchgeführt }
[ "def", "__init__", "(", "self", ",", "config", "=", "None", ",", "variables", "=", "{", "}", ",", "dicomData", "=", "{", "}", ")", ":", "#self.pdf: str = None", "self", ".", "fileCount", "=", "0", "self", ".", "_config", "=", "config", "# mit einer Kopie der Metadaten als DotMap arbeiten", "self", ".", "metadata", "=", "dict_merge", "(", "DotMap", "(", "{", "\"manual\"", ":", "{", "\"filename\"", ":", "variables", ".", "testConfig", ".", "info", "[", "\"anleitung\"", "]", ",", "\"attrs\"", ":", "{", "\"class\"", ":", "\"layout-fill-width\"", "}", ",", "}", ",", "\"plotImage\"", ":", "{", "}", ",", "\"plotImage_pdf\"", ":", "{", "}", ",", "\"evaluation_table_pdf\"", ":", "{", "}", ",", "\"tolerance_pdf\"", ":", "{", "}", "}", ")", ",", "variables", ".", "testConfig", ".", "copy", "(", ")", ")", "self", ".", "dicomData", ":", "dict", "=", "dicomData", "# ispCore initialisieren", "#ispCore.__init__( self )", "# pdf erstellung bereitstellen", "self", ".", "pdf", "=", "ispPdf", "(", "variables", "=", "variables", ",", "config", "=", "self", ".", "_config", ")", "# print(\"_variables\", self.pdf._variables )", "# kleine icons size=x1 für result Tabelle bereitstellen", "self", ".", "icons", ":", "dict", "=", "{", "5", ":", "self", ".", "pdf", ".", "resultIcon", "(", "5", ",", "iconOnly", "=", "True", ",", "size", "=", "\"x1\"", ",", "addClass", "=", "\"tableIcon\"", ")", ",", "# ok", "4", ":", "self", ".", "pdf", ".", "resultIcon", "(", "4", ",", "iconOnly", "=", "True", ",", "size", "=", "\"x1\"", ",", "addClass", "=", "\"tableIcon\"", ")", ",", "# Good", "3", ":", "self", ".", "pdf", ".", "resultIcon", "(", "3", ",", "iconOnly", "=", "True", ",", "size", "=", "\"x1\"", ",", "addClass", "=", "\"tableIcon\"", ")", ",", "# warning", "2", ":", "self", ".", "pdf", ".", "resultIcon", "(", "2", ",", "iconOnly", "=", "True", ",", "size", "=", "\"x1\"", ",", "addClass", "=", "\"tableIcon\"", ")", ",", "# Insufficient", "1", ":", "self", ".", "pdf", ".", "resultIcon", "(", "1", ",", "iconOnly", "=", "True", ",", "size", "=", "\"x1\"", ",", "addClass", "=", "\"tableIcon\"", ")", ",", "# error", "0", ":", "np", ".", "nan", ",", "# nicht durchgeführt", "\"nan\"", ":", "np", ".", "nan", ",", "# nicht durchgeführt", "999", ":", "np", ".", "nan", "# nicht durchgeführt", "}" ]
[ 56, 4 ]
[ 112, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispTest.check_pdf_data
( self, data, contents=-1, pages=-1, intern_check:bool=False )
Prüft pdf data mit vorher gespeicherten data Erzeugt im unittest dir resources ein dir 'check', um dort die Vergleichsdaten zu speichern Gibt es dieses schon werden die dort vorhandenen Dateien als check verwendet Parameters ---------- data : dict - body: dict - overlays: dict - pages: int - pdf_filename: string - pdf_filepath: string - png_filename: string - png_filepath: string contents : int Anzahl der Seiten im Content pages : int Anzahl der Seiten im PDF intern_check: Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False Returns ------- None.
Prüft pdf data mit vorher gespeicherten data Erzeugt im unittest dir resources ein dir 'check', um dort die Vergleichsdaten zu speichern Gibt es dieses schon werden die dort vorhandenen Dateien als check verwendet Parameters ---------- data : dict - body: dict - overlays: dict - pages: int - pdf_filename: string - pdf_filepath: string - png_filename: string - png_filepath: string contents : int Anzahl der Seiten im Content pages : int Anzahl der Seiten im PDF intern_check: Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False Returns ------- None.
def check_pdf_data( self, data, contents=-1, pages=-1, intern_check:bool=False ): ''' Prüft pdf data mit vorher gespeicherten data Erzeugt im unittest dir resources ein dir 'check', um dort die Vergleichsdaten zu speichern Gibt es dieses schon werden die dort vorhandenen Dateien als check verwendet Parameters ---------- data : dict - body: dict - overlays: dict - pages: int - pdf_filename: string - pdf_filepath: string - png_filename: string - png_filepath: string contents : int Anzahl der Seiten im Content pages : int Anzahl der Seiten im PDF intern_check: Wenn True wird in tests und nicht im normalem pdf Ablegeort geprüft. Default is False Returns ------- None. ''' #print( data["content"] ) self.assertIn("pdf_filename", data, "PDF data fehlerhaft filename fehlt" ) self.assertIn("png_filepath", data, "PNG data fehlerhaft filepath fehlt" ) check = {} if intern_check == True: check_dir = osp.join( ABSPATH, "resources", "check" ) else: check_dir = osp.join( os.path.dirname( data["pdf_filepath"] ), "check" ) # create the folders if not already exists if not os.path.exists( check_dir ): try: os.makedirs( check_dir ) except IOError as e: print("Unable to create dir.", e) # Dateiname für den Inhalt festlegen json_check_name = osp.join( check_dir, data["pdf_filename"] ) + ".json" png_check_name = osp.join( check_dir, data["png_filename"] ) png_new_name = data["png_filepath"] # akltuellen content speichern with open( data["pdf_filepath"] + ".json" , "w" ) as json_file: json.dump( data["content"] , json_file, indent=2 ) # beim erstenmal pdfData content in unittest anlegen if not os.path.exists( json_check_name ): with open(json_check_name, "w" ) as json_file: # print("save", json_check_name) json.dump( data["content"] , json_file, indent=2 ) if intern_check == True: pdf_check_name = osp.join( check_dir, data["pdf_filename"] ) # beim erstenmal pdf nach check kopieren if not os.path.exists( pdf_check_name ): # adding exception handling try: copyfile( data["pdf_filepath"], pdf_check_name) except IOError as e: print("Unable to copy file.", e) # beim erstenmal png nach check kopieren if not os.path.exists( png_check_name ): # adding exception handling try: copyfile(png_new_name, png_check_name) except IOError as e: print("Unable to copy file.", e) page_names = data["content"].keys() #print(page_names) # ggf Anzahl der Bereiche prüfen if contents > -1: self.assertEqual( len( page_names ), contents, "Anzahl der content Bereiche in '{}' stimmt nicht.".format( data["pdf_filepath"] ) ) # ggf Anzahl der Seiten prüfen if pages > -1: self.assertEqual( data["pages"], pages, "Anzahl der Seiten in '{}' stimmt nicht.".format( data["pdf_filepath"] ) ) # erzeugte png vergleichen und diff speichern png_check = img_io.imread( png_check_name ) png_new = img_io.imread( png_new_name ) self.assertEqual( png_check.shape, png_new.shape, "Die Bildgrößen in '{}' stimmen nicht.".format( data["pdf_filepath"] ) ) # Bild verleich erstellen und speichern compare = compare_images(png_check, png_new, method='diff') img_io.imsave( png_new_name + ".diff.png", compare ) # passende check daten (json_check_name) laden with open( json_check_name ) as json_file: check = json.load( json_file ) # einige content Inhalte prüfen from bs4 import BeautifulSoup for page_name, content in data["content"].items(): bs_data = BeautifulSoup( content, 'html.parser') bs_check = BeautifulSoup( check[ page_name ], 'html.parser') # zuerst die texte data_text = bs_data.find_all('div', {"class": "text"} ) check_text = bs_check.find_all('div', {"class": "text"} 
) self.assertEqual( data_text, check_text, "PDF content .text in '{}' ist fehlerhaft".format( data["pdf_filepath"] ) ) # gesamt check der Bilder def check_mse(imageA, imageB): # the 'Mean Squared Error' between the two images is the # sum of the squared difference between the two images; # NOTE: the two images must have the same dimension err = np.sum((imageA.astype("float") - imageB.astype("float")) ** 2) err /= float(imageA.shape[0] * imageA.shape[1]) # return the MSE, the lower the error, the more "similar" # the two images are return err # MeanCheck durchführen try: mse = check_mse( png_check, png_new ) except: mse = -1 #print( "Der PNG Vergleichsbild MSE von '{}' ist '{}'.".format( data["png_filepath"] + ".diff.png", mse ) ) #mse=0.0 self.assertEqual( 0.0, mse, "Der PNG Vergleichsbild MSE stimmt nicht. Diff image '{}' prüfen. Test erneut durchführen.".format( data["png_filepath"] + ".diff.png" ) )
[ "def", "check_pdf_data", "(", "self", ",", "data", ",", "contents", "=", "-", "1", ",", "pages", "=", "-", "1", ",", "intern_check", ":", "bool", "=", "False", ")", ":", "#print( data[\"content\"] )", "self", ".", "assertIn", "(", "\"pdf_filename\"", ",", "data", ",", "\"PDF data fehlerhaft filename fehlt\"", ")", "self", ".", "assertIn", "(", "\"png_filepath\"", ",", "data", ",", "\"PNG data fehlerhaft filepath fehlt\"", ")", "check", "=", "{", "}", "if", "intern_check", "==", "True", ":", "check_dir", "=", "osp", ".", "join", "(", "ABSPATH", ",", "\"resources\"", ",", "\"check\"", ")", "else", ":", "check_dir", "=", "osp", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "data", "[", "\"pdf_filepath\"", "]", ")", ",", "\"check\"", ")", "# create the folders if not already exists", "if", "not", "os", ".", "path", ".", "exists", "(", "check_dir", ")", ":", "try", ":", "os", ".", "makedirs", "(", "check_dir", ")", "except", "IOError", "as", "e", ":", "print", "(", "\"Unable to create dir.\"", ",", "e", ")", "# Dateiname für den Inhalt festlegen", "json_check_name", "=", "osp", ".", "join", "(", "check_dir", ",", "data", "[", "\"pdf_filename\"", "]", ")", "+", "\".json\"", "png_check_name", "=", "osp", ".", "join", "(", "check_dir", ",", "data", "[", "\"png_filename\"", "]", ")", "png_new_name", "=", "data", "[", "\"png_filepath\"", "]", "# akltuellen content speichern", "with", "open", "(", "data", "[", "\"pdf_filepath\"", "]", "+", "\".json\"", ",", "\"w\"", ")", "as", "json_file", ":", "json", ".", "dump", "(", "data", "[", "\"content\"", "]", ",", "json_file", ",", "indent", "=", "2", ")", "# beim erstenmal pdfData content in unittest anlegen", "if", "not", "os", ".", "path", ".", "exists", "(", "json_check_name", ")", ":", "with", "open", "(", "json_check_name", ",", "\"w\"", ")", "as", "json_file", ":", "# print(\"save\", json_check_name)", "json", ".", "dump", "(", "data", "[", "\"content\"", "]", ",", "json_file", ",", "indent", "=", "2", ")", "if", "intern_check", "==", "True", ":", "pdf_check_name", "=", "osp", ".", "join", "(", "check_dir", ",", "data", "[", "\"pdf_filename\"", "]", ")", "# beim erstenmal pdf nach check kopieren", "if", "not", "os", ".", "path", ".", "exists", "(", "pdf_check_name", ")", ":", "# adding exception handling", "try", ":", "copyfile", "(", "data", "[", "\"pdf_filepath\"", "]", ",", "pdf_check_name", ")", "except", "IOError", "as", "e", ":", "print", "(", "\"Unable to copy file.\"", ",", "e", ")", "# beim erstenmal png nach check kopieren", "if", "not", "os", ".", "path", ".", "exists", "(", "png_check_name", ")", ":", "# adding exception handling", "try", ":", "copyfile", "(", "png_new_name", ",", "png_check_name", ")", "except", "IOError", "as", "e", ":", "print", "(", "\"Unable to copy file.\"", ",", "e", ")", "page_names", "=", "data", "[", "\"content\"", "]", ".", "keys", "(", ")", "#print(page_names)", "# ggf Anzahl der Bereiche prüfen", "if", "contents", ">", "-", "1", ":", "self", ".", "assertEqual", "(", "len", "(", "page_names", ")", ",", "contents", ",", "\"Anzahl der content Bereiche in '{}' stimmt nicht.\"", ".", "format", "(", "data", "[", "\"pdf_filepath\"", "]", ")", ")", "# ggf Anzahl der Seiten prüfen", "if", "pages", ">", "-", "1", ":", "self", ".", "assertEqual", "(", "data", "[", "\"pages\"", "]", ",", "pages", ",", "\"Anzahl der Seiten in '{}' stimmt nicht.\"", ".", "format", "(", "data", "[", "\"pdf_filepath\"", "]", ")", ")", "# erzeugte png vergleichen und diff speichern ", "png_check", "=", "img_io", ".", "imread", 
"(", "png_check_name", ")", "png_new", "=", "img_io", ".", "imread", "(", "png_new_name", ")", "self", ".", "assertEqual", "(", "png_check", ".", "shape", ",", "png_new", ".", "shape", ",", "\"Die Bildgrößen in '{}' stimmen nicht.\".f", "o", "rmat( ", "d", "ta[\"", "p", "df_filepath\"] ", ")", "", ")", "# Bild verleich erstellen und speichern", "compare", "=", "compare_images", "(", "png_check", ",", "png_new", ",", "method", "=", "'diff'", ")", "img_io", ".", "imsave", "(", "png_new_name", "+", "\".diff.png\"", ",", "compare", ")", "# passende check daten (json_check_name) laden", "with", "open", "(", "json_check_name", ")", "as", "json_file", ":", "check", "=", "json", ".", "load", "(", "json_file", ")", "# einige content Inhalte prüfen ", "from", "bs4", "import", "BeautifulSoup", "for", "page_name", ",", "content", "in", "data", "[", "\"content\"", "]", ".", "items", "(", ")", ":", "bs_data", "=", "BeautifulSoup", "(", "content", ",", "'html.parser'", ")", "bs_check", "=", "BeautifulSoup", "(", "check", "[", "page_name", "]", ",", "'html.parser'", ")", "# zuerst die texte", "data_text", "=", "bs_data", ".", "find_all", "(", "'div'", ",", "{", "\"class\"", ":", "\"text\"", "}", ")", "check_text", "=", "bs_check", ".", "find_all", "(", "'div'", ",", "{", "\"class\"", ":", "\"text\"", "}", ")", "self", ".", "assertEqual", "(", "data_text", ",", "check_text", ",", "\"PDF content .text in '{}' ist fehlerhaft\"", ".", "format", "(", "data", "[", "\"pdf_filepath\"", "]", ")", ")", "# gesamt check der Bilder", "def", "check_mse", "(", "imageA", ",", "imageB", ")", ":", "# the 'Mean Squared Error' between the two images is the", "# sum of the squared difference between the two images;", "# NOTE: the two images must have the same dimension", "err", "=", "np", ".", "sum", "(", "(", "imageA", ".", "astype", "(", "\"float\"", ")", "-", "imageB", ".", "astype", "(", "\"float\"", ")", ")", "**", "2", ")", "err", "/=", "float", "(", "imageA", ".", "shape", "[", "0", "]", "*", "imageA", ".", "shape", "[", "1", "]", ")", "# return the MSE, the lower the error, the more \"similar\"", "# the two images are", "return", "err", "# MeanCheck durchführen ", "try", ":", "mse", "=", "check_mse", "(", "png_check", ",", "png_new", ")", "except", ":", "mse", "=", "-", "1", "#print( \"Der PNG Vergleichsbild MSE von '{}' ist '{}'.\".format( data[\"png_filepath\"] + \".diff.png\", mse ) )", "#mse=0.0", "self", ".", "assertEqual", "(", "0.0", ",", "mse", ",", "\"Der PNG Vergleichsbild MSE stimmt nicht. Diff image '{}' prüfen. Test erneut durchführen.\".f", "o", "rmat( ", "d", "ta[\"", "p", "ng_filepath\"] ", "+", "\"", "diff.png\" )", "", ")" ]
[ 2255, 4 ]
[ 2414, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
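check_pdf_data accepts a regenerated report only when the rendered PNG matches the stored reference pixel for pixel: the mean squared error of the two images must be exactly 0.0, otherwise the saved .diff.png points to the deviation. A self-contained sketch of that criterion with made-up 4x4 images:

# Per-pixel mean squared error as used above; identical images give exactly 0.0.
import numpy as np

def check_mse(image_a: np.ndarray, image_b: np.ndarray) -> float:
    err = np.sum((image_a.astype("float") - image_b.astype("float")) ** 2)
    return err / float(image_a.shape[0] * image_a.shape[1])

reference = np.zeros((4, 4), dtype=np.uint8)
candidate = reference.copy()
candidate[0, 0] = 10                       # one differing pixel
print(check_mse(reference, reference))     # 0.0  -> assertEqual(0.0, mse) passes
print(check_mse(reference, candidate))     # 6.25 -> the test fails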
ChatbotMessages.WriteChatMessage_NotEnoughPoints
( self, user, gameCost, command, commandName = False )
return
Schreibe eine Nachricht bezüglich mangelnder Punkte in den Chat
Schreibe eine Nachricht bezüglich mangelnder Punkte in den Chat
def WriteChatMessage_NotEnoughPoints( self, user, gameCost, command, commandName = False ): ''' Schreibe eine Nachricht bezüglich mangelnder Punkte in den Chat ''' thisActionName = "WriteChatMessage_NotEnoughPoints" thisUserDisplayName = self.Parent.GetDisplayName( str.lower( user ) ) thisUserCurrentPoints = int( self.Parent.GetPoints( str.lower( user ) ) ) # auf Grund Kompatibilität if commandName: self.CommandName = commandName else: self.CommandName = command self.Parent.SendStreamMessage( "/me : 😟 Tut mir Leid {0}, aber du hast nicht die erforderlichen {2} {3} für das Kommando '{1}'. 😟".format(thisUserDisplayName, self.CommandName, gameCost, self.CurrencyName ) ) return
[ "def", "WriteChatMessage_NotEnoughPoints", "(", "self", ",", "user", ",", "gameCost", ",", "command", ",", "commandName", "=", "False", ")", ":", "thisActionName", "=", "\"WriteChatMessage_NotEnoughPoints\"", "thisUserDisplayName", "=", "self", ".", "Parent", ".", "GetDisplayName", "(", "str", ".", "lower", "(", "user", ")", ")", "thisUserCurrentPoints", "=", "int", "(", "self", ".", "Parent", ".", "GetPoints", "(", "str", ".", "lower", "(", "user", ")", ")", ")", "# auf Grund Kompatibilität\r", "if", "commandName", ":", "self", ".", "CommandName", "=", "commandName", "else", ":", "self", ".", "CommandName", "=", "command", "self", ".", "Parent", ".", "SendStreamMessage", "(", "\"/me : 😟 Tut mir Leid {0}, aber du hast nicht die erforderlichen {2} {3} für das Kommando '{1}'. 😟\".format", "(", "thisUs", "e", "rDisplayName, \r", "", "self", ".", "CommandName", ",", "gameCost", ",", "self", ".", "CurrencyName", ")", ")", "return" ]
[ 172, 4 ]
[ 191, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
checkMlc.doJT_LeafSpeed
( self, fileData )
return self.pdf.finish(), result
Jahrestest: (Lamellengeschwindigkeit) Messorte: Mean des kompletten Feldes (12x40) Verwendet/berechnet: (DR600 V1 - DR600 V1 OF) / DR600 V1 OF Geschwindigkeiten: V1, V2, V3 Anzeigen: Tabelle mit Dosiswerten Parameters ---------- fileData : Pandas Returns ------- pdfFilename : str Name der erzeugten Pdfdatei result : list list mit dicts der Testergebnisse See Also -------- isp.results : Aufbau von result
Jahrestest: (Lamellengeschwindigkeit) Messorte: Mean des kompletten Feldes (12x40) Verwendet/berechnet: (DR600 V1 - DR600 V1 OF) / DR600 V1 OF Geschwindigkeiten: V1, V2, V3
def doJT_LeafSpeed( self, fileData ): """Jahrestest: (Lamellengeschwindigkeit) Messorte: Mean des kompletten Feldes (12x40) Verwendet/berechnet: (DR600 V1 - DR600 V1 OF) / DR600 V1 OF Geschwindigkeiten: V1, V2, V3 Anzeigen: Tabelle mit Dosiswerten Parameters ---------- fileData : Pandas Returns ------- pdfFilename : str Name der erzeugten Pdfdatei result : list list mit dicts der Testergebnisse See Also -------- isp.results : Aufbau von result """ result=[] # wird für progress verwendet filesMax=len( fileData ) self.fileCount = 0 # metadata vorbereiten md = dict_merge( DotMap( { "field_count": 1, "manual": { "filename": self.metadata.info["anleitung"], "attrs": {"class":"layout-fill-width", "margin-bottom": "5mm"}, }, "_text": { "left":0, "top": 0 }, "_table": { "class":"layout-40-width", "margin-top": "1mm"}, # "_table": { "left":0, "top":55, "width":65 }, "_infobox" : { "left":75, "top":50, "width":100 }, "_clip" : { "width":"80mm", "height":"30mm", "margin-left":"10mm", "margin-top":"5mm" }, "_clipLegend" : { "margin-top": "5mm" }, "_chart": { "left":75, "top":135, "width":100, "height":100}, "_toleranz": { "left":75, "top":235, "width" : 100 }, "table_fields": [ {'field': 'doserate', 'label':'Dosisleistung', 'format':'{0}'}, {'field': 'gantry', 'label':'Gantry', 'format':'{0:.1f}' }, {'field': 'collimator', 'label':'Kolli', 'format':'{0:.1f}' }, {'field': 'speed', 'label':'Geschw.', 'format':'{0}'}, {'field': 'delta', 'label':'Delta [%]', 'format':'{0:.1f}' }, {'field': 'delta_passed', 'label':'Passed', 'style': [('max-height', '10px'), ('vertical-align','top')] } ] } ), self.metadata ) def groupBy( df_group ): """ """ # das Datum vom ersten Datensatz verwenden checkDate=df_group['AcquisitionDateTime'].iloc[0].strftime("%d.%m.%Y") self.pdf.setContentName( checkDate ) # # Anleitung # self.pdf.textFile( **md.manual ) # # Infobox # mit MLC Clip und Text # html = '' html += self.pdf.image( "qa/LeafSpeed.svg", attrs=md["_clip"], render=False) html += self.pdf.textFile("qa/LeafSpeed_Legend.md", attrs=md["_clipLegend"], render=False) data = [] # pro speed def groupBySpeed(df_speed): #print( len(df_speed) ) #print( df_speed) df_base = df_speed.query("open == 'OF'") df_fields = df_speed.query("open != 'OF'") # alles notwendige da? 
errors = self.checkFields( md, df_base, df_fields, md["field_count"]) if len(errors) > 0: result.append( self.pdf_error_result( md, date=checkDate, group_len=len( result ), errors=errors ) ) return ''' if not self.checkFields( md, df_base, df_field, 1 ): return ''' check = qa_mlc( checkField=self.getFullData( df_fields.iloc[0] ), baseField=self.getFullData( df_base.iloc[0] ), normalize="prozent" ) # Daten merken data.append( { "doserate" : check.infos["doserate"], "speed" : check.infos["speed"], "gantry" : check.infos["gantry"], "collimator" : check.infos["collimator"], "delta" : check.image.getFieldRoi().mean() * 100 }) # progress pro file stimmt nicht immer genau (baseimage) # 40% für die dicom daten 40% für die Auswertung 20 % für das pdf self.fileCount += 2 if hasattr( logger, "progress"): logger.progress( md["testId"], 40 + ( 40 / filesMax * self.fileCount ) ) # alle speed arten durch gehen df_group.groupby( [ "gantry", "collimator", "doserate", "speed" ] ).apply( groupBySpeed ) self.pdf.html( html, md["_infobox"], { "class" : "infobox" }) # es wurden keine Felder gefunden (checkFields fehler) if len( data ) < 1: result.append( self.pdf_error_result( md, date=checkDate, group_len=len( result ), msg='<b>Datenfehler</b>: keine Felder gefunden oder das offene Feld fehlt.', pos={ "top":150 } ) ) return df = pd.DataFrame(data) # # Abweichung ausrechnen und Passed setzen # check = [ { "field": 'delta', 'tolerance':'default' } ] acceptance = self.check_acceptance( df, md, check ) # # Ergebnis in result merken # result.append( self.createResult( df, md, check, df_group['AcquisitionDateTime'].iloc[0].strftime("%Y%m%d"), len( result ), # bisherige Ergebnisse in result acceptance ) ) # # Tabelle erzeugen # self.pdf.pandas( df, attrs=md["_table"], fields=md["table_fields"] ) # # chart # # plot anlegen plot = plotClass( ) fig, ax = plot.initPlot( md["_chart"] , True ) # data frame gruppieren und mit neuem index versehen df_chart = df.set_index(['gantry', 'collimator', 'speed', 'doserate' ])['delta'].unstack() # als bar plot ausgeben df_chart.plot( ax=ax, kind='bar', rot=75) # limits legende und grid ax.set_ylim( [-2.0, 2.0] ) ax.grid( ) ax.legend( ) plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0) # chart im PDF anzeigen self.pdf.image( plot.getPlot(), md["_chart"] ) # toleranz anzeigen text_values = { "f_warning": md.current.tolerance.default.warning.get("f",""), "f_error": md.current.tolerance.default.error.get("f","") } text = """<br> Warnung bei: <b style="position:absolute;left:25mm;">{f_warning}</b><br> Fehler bei: <b style="position:absolute;left:25mm;">{f_error}</b> """.format( **text_values ).replace("{value}", "Delta") self.pdf.text( text, md["_toleranz"] ) # Gesamt check - das schlechteste aus der tabelle self.pdf.resultIcon( acceptance ) # zusätzliche Felder für speed und open anlegen # def splitSpeed(value, **args): pos = args.get("pos", 1) s = value.split() if len(s) > pos: return s[pos] else: return "" # speed und open aus RadiationId erzeugen fileData["speed"] = fileData["RadiationId"].apply( splitSpeed, pos=1 ) fileData["open"] = fileData["RadiationId"].apply( splitSpeed, pos=2 ) # # Gruppiert nach day abarbeiten # (fileData .sort_values(["gantry", "collimator", "doserate", "speed"]) .groupby( [ 'day' ] ) # , 'SeriesNumber' .apply( groupBy ) ) # abschließen pdfdaten und result zurückgeben return self.pdf.finish(), result
[ "def", "doJT_LeafSpeed", "(", "self", ",", "fileData", ")", ":", "result", "=", "[", "]", "# wird für progress verwendet", "filesMax", "=", "len", "(", "fileData", ")", "self", ".", "fileCount", "=", "0", "# metadata vorbereiten", "md", "=", "dict_merge", "(", "DotMap", "(", "{", "\"field_count\"", ":", "1", ",", "\"manual\"", ":", "{", "\"filename\"", ":", "self", ".", "metadata", ".", "info", "[", "\"anleitung\"", "]", ",", "\"attrs\"", ":", "{", "\"class\"", ":", "\"layout-fill-width\"", ",", "\"margin-bottom\"", ":", "\"5mm\"", "}", ",", "}", ",", "\"_text\"", ":", "{", "\"left\"", ":", "0", ",", "\"top\"", ":", "0", "}", ",", "\"_table\"", ":", "{", "\"class\"", ":", "\"layout-40-width\"", ",", "\"margin-top\"", ":", "\"1mm\"", "}", ",", "# \"_table\": { \"left\":0, \"top\":55, \"width\":65 },", "\"_infobox\"", ":", "{", "\"left\"", ":", "75", ",", "\"top\"", ":", "50", ",", "\"width\"", ":", "100", "}", ",", "\"_clip\"", ":", "{", "\"width\"", ":", "\"80mm\"", ",", "\"height\"", ":", "\"30mm\"", ",", "\"margin-left\"", ":", "\"10mm\"", ",", "\"margin-top\"", ":", "\"5mm\"", "}", ",", "\"_clipLegend\"", ":", "{", "\"margin-top\"", ":", "\"5mm\"", "}", ",", "\"_chart\"", ":", "{", "\"left\"", ":", "75", ",", "\"top\"", ":", "135", ",", "\"width\"", ":", "100", ",", "\"height\"", ":", "100", "}", ",", "\"_toleranz\"", ":", "{", "\"left\"", ":", "75", ",", "\"top\"", ":", "235", ",", "\"width\"", ":", "100", "}", ",", "\"table_fields\"", ":", "[", "{", "'field'", ":", "'doserate'", ",", "'label'", ":", "'Dosisleistung'", ",", "'format'", ":", "'{0}'", "}", ",", "{", "'field'", ":", "'gantry'", ",", "'label'", ":", "'Gantry'", ",", "'format'", ":", "'{0:.1f}'", "}", ",", "{", "'field'", ":", "'collimator'", ",", "'label'", ":", "'Kolli'", ",", "'format'", ":", "'{0:.1f}'", "}", ",", "{", "'field'", ":", "'speed'", ",", "'label'", ":", "'Geschw.'", ",", "'format'", ":", "'{0}'", "}", ",", "{", "'field'", ":", "'delta'", ",", "'label'", ":", "'Delta [%]'", ",", "'format'", ":", "'{0:.1f}'", "}", ",", "{", "'field'", ":", "'delta_passed'", ",", "'label'", ":", "'Passed'", ",", "'style'", ":", "[", "(", "'max-height'", ",", "'10px'", ")", ",", "(", "'vertical-align'", ",", "'top'", ")", "]", "}", "]", "}", ")", ",", "self", ".", "metadata", ")", "def", "groupBy", "(", "df_group", ")", ":", "\"\"\"\n \"\"\"", "# das Datum vom ersten Datensatz verwenden", "checkDate", "=", "df_group", "[", "'AcquisitionDateTime'", "]", ".", "iloc", "[", "0", "]", ".", "strftime", "(", "\"%d.%m.%Y\"", ")", "self", ".", "pdf", ".", "setContentName", "(", "checkDate", ")", "#", "# Anleitung", "#", "self", ".", "pdf", ".", "textFile", "(", "*", "*", "md", ".", "manual", ")", "#", "# Infobox", "# mit MLC Clip und Text", "#", "html", "=", "''", "html", "+=", "self", ".", "pdf", ".", "image", "(", "\"qa/LeafSpeed.svg\"", ",", "attrs", "=", "md", "[", "\"_clip\"", "]", ",", "render", "=", "False", ")", "html", "+=", "self", ".", "pdf", ".", "textFile", "(", "\"qa/LeafSpeed_Legend.md\"", ",", "attrs", "=", "md", "[", "\"_clipLegend\"", "]", ",", "render", "=", "False", ")", "data", "=", "[", "]", "# pro speed", "def", "groupBySpeed", "(", "df_speed", ")", ":", "#print( len(df_speed) )", "#print( df_speed)", "df_base", "=", "df_speed", ".", "query", "(", "\"open == 'OF'\"", ")", "df_fields", "=", "df_speed", ".", "query", "(", "\"open != 'OF'\"", ")", "# alles notwendige da?", "errors", "=", "self", ".", "checkFields", "(", "md", ",", "df_base", ",", "df_fields", ",", "md", "[", "\"field_count\"", "]", ")", "if", 
"len", "(", "errors", ")", ">", "0", ":", "result", ".", "append", "(", "self", ".", "pdf_error_result", "(", "md", ",", "date", "=", "checkDate", ",", "group_len", "=", "len", "(", "result", ")", ",", "errors", "=", "errors", ")", ")", "return", "'''\n if not self.checkFields( md, df_base, df_field, 1 ):\n return\n '''", "check", "=", "qa_mlc", "(", "checkField", "=", "self", ".", "getFullData", "(", "df_fields", ".", "iloc", "[", "0", "]", ")", ",", "baseField", "=", "self", ".", "getFullData", "(", "df_base", ".", "iloc", "[", "0", "]", ")", ",", "normalize", "=", "\"prozent\"", ")", "# Daten merken", "data", ".", "append", "(", "{", "\"doserate\"", ":", "check", ".", "infos", "[", "\"doserate\"", "]", ",", "\"speed\"", ":", "check", ".", "infos", "[", "\"speed\"", "]", ",", "\"gantry\"", ":", "check", ".", "infos", "[", "\"gantry\"", "]", ",", "\"collimator\"", ":", "check", ".", "infos", "[", "\"collimator\"", "]", ",", "\"delta\"", ":", "check", ".", "image", ".", "getFieldRoi", "(", ")", ".", "mean", "(", ")", "*", "100", "}", ")", "# progress pro file stimmt nicht immer genau (baseimage)", "# 40% für die dicom daten 40% für die Auswertung 20 % für das pdf", "self", ".", "fileCount", "+=", "2", "if", "hasattr", "(", "logger", ",", "\"progress\"", ")", ":", "logger", ".", "progress", "(", "md", "[", "\"testId\"", "]", ",", "40", "+", "(", "40", "/", "filesMax", "*", "self", ".", "fileCount", ")", ")", "# alle speed arten durch gehen", "df_group", ".", "groupby", "(", "[", "\"gantry\"", ",", "\"collimator\"", ",", "\"doserate\"", ",", "\"speed\"", "]", ")", ".", "apply", "(", "groupBySpeed", ")", "self", ".", "pdf", ".", "html", "(", "html", ",", "md", "[", "\"_infobox\"", "]", ",", "{", "\"class\"", ":", "\"infobox\"", "}", ")", "# es wurden keine Felder gefunden (checkFields fehler)", "if", "len", "(", "data", ")", "<", "1", ":", "result", ".", "append", "(", "self", ".", "pdf_error_result", "(", "md", ",", "date", "=", "checkDate", ",", "group_len", "=", "len", "(", "result", ")", ",", "msg", "=", "'<b>Datenfehler</b>: keine Felder gefunden oder das offene Feld fehlt.'", ",", "pos", "=", "{", "\"top\"", ":", "150", "}", ")", ")", "return", "df", "=", "pd", ".", "DataFrame", "(", "data", ")", "#", "# Abweichung ausrechnen und Passed setzen", "#", "check", "=", "[", "{", "\"field\"", ":", "'delta'", ",", "'tolerance'", ":", "'default'", "}", "]", "acceptance", "=", "self", ".", "check_acceptance", "(", "df", ",", "md", ",", "check", ")", "#", "# Ergebnis in result merken", "#", "result", ".", "append", "(", "self", ".", "createResult", "(", "df", ",", "md", ",", "check", ",", "df_group", "[", "'AcquisitionDateTime'", "]", ".", "iloc", "[", "0", "]", ".", "strftime", "(", "\"%Y%m%d\"", ")", ",", "len", "(", "result", ")", ",", "# bisherige Ergebnisse in result", "acceptance", ")", ")", "#", "# Tabelle erzeugen", "#", "self", ".", "pdf", ".", "pandas", "(", "df", ",", "attrs", "=", "md", "[", "\"_table\"", "]", ",", "fields", "=", "md", "[", "\"table_fields\"", "]", ")", "#", "# chart", "#", "# plot anlegen", "plot", "=", "plotClass", "(", ")", "fig", ",", "ax", "=", "plot", ".", "initPlot", "(", "md", "[", "\"_chart\"", "]", ",", "True", ")", "# data frame gruppieren und mit neuem index versehen", "df_chart", "=", "df", ".", "set_index", "(", "[", "'gantry'", ",", "'collimator'", ",", "'speed'", ",", "'doserate'", "]", ")", "[", "'delta'", "]", ".", "unstack", "(", ")", "# als bar plot ausgeben", "df_chart", ".", "plot", "(", "ax", "=", "ax", ",", "kind", "=", "'bar'", ",", "rot", 
"=", "75", ")", "# limits legende und grid", "ax", ".", "set_ylim", "(", "[", "-", "2.0", ",", "2.0", "]", ")", "ax", ".", "grid", "(", ")", "ax", ".", "legend", "(", ")", "plt", ".", "tight_layout", "(", "pad", "=", "0.4", ",", "w_pad", "=", "0.5", ",", "h_pad", "=", "1.0", ")", "# chart im PDF anzeigen", "self", ".", "pdf", ".", "image", "(", "plot", ".", "getPlot", "(", ")", ",", "md", "[", "\"_chart\"", "]", ")", "# toleranz anzeigen", "text_values", "=", "{", "\"f_warning\"", ":", "md", ".", "current", ".", "tolerance", ".", "default", ".", "warning", ".", "get", "(", "\"f\"", ",", "\"\"", ")", ",", "\"f_error\"", ":", "md", ".", "current", ".", "tolerance", ".", "default", ".", "error", ".", "get", "(", "\"f\"", ",", "\"\"", ")", "}", "text", "=", "\"\"\"<br>\n Warnung bei: <b style=\"position:absolute;left:25mm;\">{f_warning}</b><br>\n Fehler bei: <b style=\"position:absolute;left:25mm;\">{f_error}</b>\n \"\"\"", ".", "format", "(", "*", "*", "text_values", ")", ".", "replace", "(", "\"{value}\"", ",", "\"Delta\"", ")", "self", ".", "pdf", ".", "text", "(", "text", ",", "md", "[", "\"_toleranz\"", "]", ")", "# Gesamt check - das schlechteste aus der tabelle", "self", ".", "pdf", ".", "resultIcon", "(", "acceptance", ")", "# zusätzliche Felder für speed und open anlegen", "#", "def", "splitSpeed", "(", "value", ",", "*", "*", "args", ")", ":", "pos", "=", "args", ".", "get", "(", "\"pos\"", ",", "1", ")", "s", "=", "value", ".", "split", "(", ")", "if", "len", "(", "s", ")", ">", "pos", ":", "return", "s", "[", "pos", "]", "else", ":", "return", "\"\"", "# speed und open aus RadiationId erzeugen", "fileData", "[", "\"speed\"", "]", "=", "fileData", "[", "\"RadiationId\"", "]", ".", "apply", "(", "splitSpeed", ",", "pos", "=", "1", ")", "fileData", "[", "\"open\"", "]", "=", "fileData", "[", "\"RadiationId\"", "]", ".", "apply", "(", "splitSpeed", ",", "pos", "=", "2", ")", "#", "# Gruppiert nach day abarbeiten", "#", "(", "fileData", ".", "sort_values", "(", "[", "\"gantry\"", ",", "\"collimator\"", ",", "\"doserate\"", ",", "\"speed\"", "]", ")", ".", "groupby", "(", "[", "'day'", "]", ")", "# , 'SeriesNumber'", ".", "apply", "(", "groupBy", ")", ")", "# abschließen pdfdaten und result zurückgeben", "return", "self", ".", "pdf", ".", "finish", "(", ")", ",", "result" ]
[ 1488, 4 ]
[ 1715, 40 ]
null
python
de
['de', 'de', 'de']
False
true
null
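doJT_LeafSpeed tabulates, per its docstring, the relative deviation of each speed field against the open reference field ((DR600 V1 - DR600 V1 OF) / DR600 V1 OF), shown in percent and checked against a tolerance. A short sketch of that delta with invented ROI means:

# Invented dose values, only to illustrate the percent delta from the docstring.
import numpy as np

open_field_mean = 100.0                             # mean of the open field ("OF")
speed_field_means = np.array([99.2, 100.4, 99.8])   # ROI means for V1, V2, V3

delta_percent = (speed_field_means - open_field_mean) / open_field_mean * 100
for speed, delta in zip(["V1", "V2", "V3"], delta_percent):
    print(f"{speed}: delta = {delta:+.1f} %")       # stays inside the +/-2 % chart limits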
register_diagram
(func)
return register_type_func(DIAGRAM_TYPES, DiagramError, func)
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DIAGRAM_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DIAGRAM_TYPES hinzu.
def register_diagram(func): """Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DIAGRAM_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block """ return register_type_func(DIAGRAM_TYPES, DiagramError, func)
[ "def", "register_diagram", "(", "func", ")", ":", "return", "register_type_func", "(", "DIAGRAM_TYPES", ",", "DiagramError", ",", "func", ")" ]
[ 31, 0 ]
[ 38, 64 ]
null
python
de
['de', 'de', 'de']
True
true
null
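register_diagram only delegates: according to its docstring, register_type_func wraps the given function in a try/except block and stores it in the DIAGRAM_TYPES dictionary. Since that helper's body is not shown here, the following is a hypothetical stand-in that reproduces the described behaviour with invented names (HYPO_TYPES, HypoError):

# Hypothetical registry helper in the spirit of register_type_func.
HYPO_TYPES = {}

class HypoError(Exception):
    pass

def register_type_func(registry, error_cls, func):
    def wrapper(values, data):
        try:
            return func(values, data)
        except Exception as exc:
            raise error_cls(f"{func.__name__} failed") from exc
    registry[func.__name__] = wrapper
    return wrapper

def bar_chart(values, data):
    return f"bar chart with {len(values)} values"

register_type_func(HYPO_TYPES, HypoError, bar_chart)
print(HYPO_TYPES["bar_chart"]([1, 2, 3], data=None))   # bar chart with 3 values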
PdfGenerator.markdown
( self, text="", area:dict={}, attrs:dict={}, render=None )
return element_html
Einen Markdowntext einfügen. Parameters ---------- text : str Der einzufügende Text area : Area {left,top,width,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet Returns ------- element_html: str HTML des erzeugten Elements
Einen Markdowntext einfügen.
def markdown( self, text="", area:dict={}, attrs:dict={}, render=None ): """Einen Markdowntext einfügen. Parameters ---------- text : str Der einzufügende Text area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet Returns ------- element_html: str HTML des erzeugten Elements """ if render == None: render = self.autoRender html = markdown.markdown( text, extensions=['extra', 'codehilite'] ) # Eigenschaften des Elements if not "font-size" in attrs: attrs["font-size"] = "8pt" _id, _class, _style = self._get_attrs( attrs ) _area = self._get_area_style( area ) element_html = '\n\t<div class="text markdown {_class}" style="{_style} {_area}" >{content}</div>'.format( _class = _class, _style = _style, _area = _area, content = html ) if render: self._html( element_html ) return element_html
[ "def", "markdown", "(", "self", ",", "text", "=", "\"\"", ",", "area", ":", "dict", "=", "{", "}", ",", "attrs", ":", "dict", "=", "{", "}", ",", "render", "=", "None", ")", ":", "if", "render", "==", "None", ":", "render", "=", "self", ".", "autoRender", "html", "=", "markdown", ".", "markdown", "(", "text", ",", "extensions", "=", "[", "'extra'", ",", "'codehilite'", "]", ")", "# Eigenschaften des Elements", "if", "not", "\"font-size\"", "in", "attrs", ":", "attrs", "[", "\"font-size\"", "]", "=", "\"8pt\"", "_id", ",", "_class", ",", "_style", "=", "self", ".", "_get_attrs", "(", "attrs", ")", "_area", "=", "self", ".", "_get_area_style", "(", "area", ")", "element_html", "=", "'\\n\\t<div class=\"text markdown {_class}\" style=\"{_style} {_area}\" >{content}</div>'", ".", "format", "(", "_class", "=", "_class", ",", "_style", "=", "_style", ",", "_area", "=", "_area", ",", "content", "=", "html", ")", "if", "render", ":", "self", ".", "_html", "(", "element_html", ")", "return", "element_html" ]
[ 914, 4 ]
[ 953, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
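The core of PdfGenerator.markdown is a plain markdown-to-HTML conversion with the 'extra' and 'codehilite' extensions, wrapped in a positioned div. A stripped-down, runnable version of just that step (the inline style is only a placeholder for the attrs/area handling):

# Markdown -> HTML with the same extensions the method above requests.
import markdown

text = "## Hinweis\n\n* erster Punkt\n* zweiter Punkt"
html = markdown.markdown(text, extensions=["extra", "codehilite"])
element_html = '<div class="text markdown" style="font-size:8pt;">{}</div>'.format(html)
print(element_html)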
ispSAFRS._int_parse_args
(cls, kwargs:dict={}, method=None, swagger_path=None )
return has_args
Parsed die request parameter mit den Angaben aus cls._swagger_paths. Swagger datatypes:: string number integer boolean array object Parameters ---------- kwargs : dict, optional Alle request Parameter. The default is {}. method : str, optional The request method. (For example ``'GET'`` or ``'POST'``). The default is None. swagger_path : str, optional Der zum request passende Pfad der swagger Beschreibung. The default is None. Returns ------- has_args : dict Die überprüften Parameter. RequestParser kann auch so bestimmt werden:: from flask_restplus import RequestParser parser = RequestParser()
Parsed die request parameter mit den Angaben aus cls._swagger_paths.
def _int_parse_args(cls, kwargs:dict={}, method=None, swagger_path=None ): """Parsed die request parameter mit den Angaben aus cls._swagger_paths. Swagger datatypes:: string number integer boolean array object Parameters ---------- kwargs : dict, optional Alle request Parameter. The default is {}. method : str, optional The request method. (For example ``'GET'`` or ``'POST'``). The default is None. swagger_path : str, optional Der zum request passende Pfad der swagger Beschreibung. The default is None. Returns ------- has_args : dict Die überprüften Parameter. RequestParser kann auch so bestimmt werden:: from flask_restplus import RequestParser parser = RequestParser() """ if not method: method=request.method.lower() paths = cls._api.get_swagger_doc().get("paths", {}) # parameter für swagger_path holen (cls._swagger_paths) parameters = paths.get(swagger_path, {}).get( method, {} ).get("parameters", {} ) parser = get_parser( parameters ) # alle fehler sammeln (TypeError in value) parser.bundle_errors = True # request parsen args = parser.parse_args( ) has_args = {} # alle args druchgehen und fehlerhafte rauswerfen for key, value in args.items(): if not type(value) == TypeError: # ohne Fehler sofort verwenden has_args[key] = value else: # value aus request nehmen value = request.args.get(key, "") check_type = "" # Versuchen den parameter selbst umzuwandeln (object) # parameter suchen - immer defaults type=string for parameter in parameters: #log.warning("_int_parse_args parameter {}".format( json.dumps(parameter) ) ) if parameter.get( "name", "") == key: check_type = parameter.get("type", "string" ) break # schleife abbrechen # umwandlung versuchen if not check_type: # pragma: no cover # Fehlermeldung in appError - sollte auber nicht vorkommen siehe parameter suchen cls.appError( "swagger Parameter Error", "{}={}".format( key, value ) ) elif check_type == "object": # swagger freeform object (arbitrary property/value pairs) try: has_args[key] = json.loads( value ) except: # includes simplejson.decoder.JSONDecodeError cls.appError( "swagger Parameter Json Error", "{}={}".format( key, value ) ) pass elif check_type == "number": has_args[key] = float( value ) else: has_args[key] = value return has_args
[ "def", "_int_parse_args", "(", "cls", ",", "kwargs", ":", "dict", "=", "{", "}", ",", "method", "=", "None", ",", "swagger_path", "=", "None", ")", ":", "if", "not", "method", ":", "method", "=", "request", ".", "method", ".", "lower", "(", ")", "paths", "=", "cls", ".", "_api", ".", "get_swagger_doc", "(", ")", ".", "get", "(", "\"paths\"", ",", "{", "}", ")", "# parameter für swagger_path holen (cls._swagger_paths)", "parameters", "=", "paths", ".", "get", "(", "swagger_path", ",", "{", "}", ")", ".", "get", "(", "method", ",", "{", "}", ")", ".", "get", "(", "\"parameters\"", ",", "{", "}", ")", "parser", "=", "get_parser", "(", "parameters", ")", "# alle fehler sammeln (TypeError in value)", "parser", ".", "bundle_errors", "=", "True", "# request parsen", "args", "=", "parser", ".", "parse_args", "(", ")", "has_args", "=", "{", "}", "# alle args druchgehen und fehlerhafte rauswerfen", "for", "key", ",", "value", "in", "args", ".", "items", "(", ")", ":", "if", "not", "type", "(", "value", ")", "==", "TypeError", ":", "# ohne Fehler sofort verwenden", "has_args", "[", "key", "]", "=", "value", "else", ":", "# value aus request nehmen", "value", "=", "request", ".", "args", ".", "get", "(", "key", ",", "\"\"", ")", "check_type", "=", "\"\"", "# Versuchen den parameter selbst umzuwandeln (object)", "# parameter suchen - immer defaults type=string", "for", "parameter", "in", "parameters", ":", "#log.warning(\"_int_parse_args parameter {}\".format( json.dumps(parameter) ) )", "if", "parameter", ".", "get", "(", "\"name\"", ",", "\"\"", ")", "==", "key", ":", "check_type", "=", "parameter", ".", "get", "(", "\"type\"", ",", "\"string\"", ")", "break", "# schleife abbrechen", "# umwandlung versuchen", "if", "not", "check_type", ":", "# pragma: no cover", "# Fehlermeldung in appError - sollte auber nicht vorkommen siehe parameter suchen", "cls", ".", "appError", "(", "\"swagger Parameter Error\"", ",", "\"{}={}\"", ".", "format", "(", "key", ",", "value", ")", ")", "elif", "check_type", "==", "\"object\"", ":", "# swagger freeform object (arbitrary property/value pairs)", "try", ":", "has_args", "[", "key", "]", "=", "json", ".", "loads", "(", "value", ")", "except", ":", "# includes simplejson.decoder.JSONDecodeError", "cls", ".", "appError", "(", "\"swagger Parameter Json Error\"", ",", "\"{}={}\"", ".", "format", "(", "key", ",", "value", ")", ")", "pass", "elif", "check_type", "==", "\"number\"", ":", "has_args", "[", "key", "]", "=", "float", "(", "value", ")", "else", ":", "has_args", "[", "key", "]", "=", "value", "return", "has_args" ]
[ 732, 4 ]
[ 815, 23 ]
null
python
de
['de', 'de', 'de']
True
true
null
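When the RequestParser flags a parameter, _int_parse_args re-reads the raw value and coerces it itself according to the swagger type: 'object' is parsed as JSON, 'number' becomes float, everything else stays a string. A minimal sketch of that fallback coercion with made-up values:

# Fallback coercion by swagger type, as in the else-branch above.
import json

def coerce(value: str, swagger_type: str):
    if swagger_type == "object":          # free-form object -> parse as JSON
        return json.loads(value)
    if swagger_type == "number":
        return float(value)
    return value                          # default type "string": keep as-is

print(coerce('{"filter": {"id": 42}}', "object"))   # {'filter': {'id': 42}}
print(coerce("3.5", "number"))                       # 3.5
print(coerce("abc", "string"))                       # abc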
OpenWeatherMap.get_data
(self)
Hole Wetterdaten
Hole Wetterdaten
def get_data(self): """ Hole Wetterdaten """ if not self.owm_key: waiting('Please get an API-Key from', 'openweathermap.org/appid') pygame.time.wait(5000) pygame.event.get() # werfe aufgelaufene Events weg if not self.owm_loc: waiting('Please set a location', 'for openweathermap') pygame.time.wait(5000) pygame.event.get() # werfe aufgelaufene Events weg openweather_base = 'http://api.openweathermap.org/data/2.5/' try: weather = request.urlopen(openweather_base + 'weather?' + \ self.owm_loc + '&units=metric&lang=de&mode=json&APPID=' + self.owm_key) weather_data = json.loads(weather.read().decode('utf-8')) self.stadt = weather_data['name'] # -273.15 if units!=metric self.temperatur = str(int(round(weather_data['main']['temp'], 0))) self.luftdruck = str(int(weather_data['main']['pressure'])) self.luftfeuchte = str(int(weather_data['main']['humidity'])) self.wetterlage = weather_data['weather'][0]['icon'] except (error.URLError, TypeError, UnboundLocalError): print(datetime.datetime.now().strftime('%H:%M') + ': No Weather Data.') try: # Älterer owm_key pygame.time.wait(150) # Warte 150ms um HttpError 429 zu vermeiden daily = request.urlopen(openweather_base + 'forecast/daily?' + \ self.owm_loc + '&units=metric&lang=de&mode=json&APPID=' + self.owm_key) daily_data = json.loads(daily.read().decode('utf-8')) self.heute_min = str(round(daily_data['list'][0]['temp']['min'], 1)) self.heute_max = str(round(daily_data['list'][0]['temp']['max'], 1)) self.morgen_min = str(round(daily_data['list'][1]['temp']['min'], 1)) self.morgen_max = str(round(daily_data['list'][1]['temp']['max'], 1)) self.vorschau = daily_data['list'][1]['weather'][0]['icon'] print("Alter OWM_KEY") except (error.URLError, TypeError, UnboundLocalError): try: # Neuerer owm_key pygame.time.wait(150) # Warte 150ms um HttpError 429 zu vermeiden forecast5 = request.urlopen(openweather_base + 'forecast?' + self.owm_loc + '&units=metric&lang=de&mode=json&APPID=' + self.owm_key) forecast5_data = json.loads(forecast5.read().decode('utf-8')) # 5-Tagesvorhersage in dreistunden Abschnitten # Wir brauchen "nur" den Rest von heute und den ganzen Tag morgen # Minimum und maximum self.temperatur werden zwischen 6 und 24Uhr ermittelt. # dt_txt -> 2018-12-17 21:00:00 # dt_txt[11:13] = '21' zeit = int(forecast5_data['list'][0]['dt_txt'][11:13]) for idx in range(8, 0, -1): if 3 * idx + zeit == 24: heute_24uhr = idx heute_start = max((heute_24uhr - 6), 0) break t_list = [] # Vorhandene Temp.-Daten bis Mitternacht for idx in range(heute_start, heute_24uhr + 1): t_list.append(forecast5_data['list'][idx]['main']['temp']) t_list.append(weather_data['main']['temp']) # aktuelle Temp. mit einbeziehen self.heute_max = str(round(max(t_list), 1)) self.heute_min = str(round(min(t_list), 1)) t_list = [] # Alle Temp.-Daten von morgen ab 6 Uhr for idx in range(heute_24uhr + 2, heute_24uhr + 9): t_list.append(forecast5_data['list'][idx]['main']['temp']) self.morgen_max = str(round(max(t_list), 1)) self.morgen_min = str(round(min(t_list), 1)) # Icon von morgen 12 Uhr self.vorschau = forecast5_data['list'][heute_24uhr + 4]['weather'][0]['icon'] except (error.URLError, TypeError, UnboundLocalError): print(datetime.datetime.now().strftime('%H:%M') + ': No Forecast Data.')
[ "def", "get_data", "(", "self", ")", ":", "if", "not", "self", ".", "owm_key", ":", "waiting", "(", "'Please get an API-Key from'", ",", "'openweathermap.org/appid'", ")", "pygame", ".", "time", ".", "wait", "(", "5000", ")", "pygame", ".", "event", ".", "get", "(", ")", "# werfe aufgelaufene Events weg", "if", "not", "self", ".", "owm_loc", ":", "waiting", "(", "'Please set a location'", ",", "'for openweathermap'", ")", "pygame", ".", "time", ".", "wait", "(", "5000", ")", "pygame", ".", "event", ".", "get", "(", ")", "# werfe aufgelaufene Events weg", "openweather_base", "=", "'http://api.openweathermap.org/data/2.5/'", "try", ":", "weather", "=", "request", ".", "urlopen", "(", "openweather_base", "+", "'weather?'", "+", "self", ".", "owm_loc", "+", "'&units=metric&lang=de&mode=json&APPID='", "+", "self", ".", "owm_key", ")", "weather_data", "=", "json", ".", "loads", "(", "weather", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "self", ".", "stadt", "=", "weather_data", "[", "'name'", "]", "# -273.15 if units!=metric", "self", ".", "temperatur", "=", "str", "(", "int", "(", "round", "(", "weather_data", "[", "'main'", "]", "[", "'temp'", "]", ",", "0", ")", ")", ")", "self", ".", "luftdruck", "=", "str", "(", "int", "(", "weather_data", "[", "'main'", "]", "[", "'pressure'", "]", ")", ")", "self", ".", "luftfeuchte", "=", "str", "(", "int", "(", "weather_data", "[", "'main'", "]", "[", "'humidity'", "]", ")", ")", "self", ".", "wetterlage", "=", "weather_data", "[", "'weather'", "]", "[", "0", "]", "[", "'icon'", "]", "except", "(", "error", ".", "URLError", ",", "TypeError", ",", "UnboundLocalError", ")", ":", "print", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%H:%M'", ")", "+", "': No Weather Data.'", ")", "try", ":", "# Älterer owm_key", "pygame", ".", "time", ".", "wait", "(", "150", ")", "# Warte 150ms um HttpError 429 zu vermeiden", "daily", "=", "request", ".", "urlopen", "(", "openweather_base", "+", "'forecast/daily?'", "+", "self", ".", "owm_loc", "+", "'&units=metric&lang=de&mode=json&APPID='", "+", "self", ".", "owm_key", ")", "daily_data", "=", "json", ".", "loads", "(", "daily", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "self", ".", "heute_min", "=", "str", "(", "round", "(", "daily_data", "[", "'list'", "]", "[", "0", "]", "[", "'temp'", "]", "[", "'min'", "]", ",", "1", ")", ")", "self", ".", "heute_max", "=", "str", "(", "round", "(", "daily_data", "[", "'list'", "]", "[", "0", "]", "[", "'temp'", "]", "[", "'max'", "]", ",", "1", ")", ")", "self", ".", "morgen_min", "=", "str", "(", "round", "(", "daily_data", "[", "'list'", "]", "[", "1", "]", "[", "'temp'", "]", "[", "'min'", "]", ",", "1", ")", ")", "self", ".", "morgen_max", "=", "str", "(", "round", "(", "daily_data", "[", "'list'", "]", "[", "1", "]", "[", "'temp'", "]", "[", "'max'", "]", ",", "1", ")", ")", "self", ".", "vorschau", "=", "daily_data", "[", "'list'", "]", "[", "1", "]", "[", "'weather'", "]", "[", "0", "]", "[", "'icon'", "]", "print", "(", "\"Alter OWM_KEY\"", ")", "except", "(", "error", ".", "URLError", ",", "TypeError", ",", "UnboundLocalError", ")", ":", "try", ":", "# Neuerer owm_key", "pygame", ".", "time", ".", "wait", "(", "150", ")", "# Warte 150ms um HttpError 429 zu vermeiden", "forecast5", "=", "request", ".", "urlopen", "(", "openweather_base", "+", "'forecast?'", "+", "self", ".", "owm_loc", "+", "'&units=metric&lang=de&mode=json&APPID='", "+", "self", ".", "owm_key", ")", "forecast5_data", "=", 
"json", ".", "loads", "(", "forecast5", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "# 5-Tagesvorhersage in dreistunden Abschnitten", "# Wir brauchen \"nur\" den Rest von heute und den ganzen Tag morgen", "# Minimum und maximum self.temperatur werden zwischen 6 und 24Uhr ermittelt.", "# dt_txt -> 2018-12-17 21:00:00", "# dt_txt[11:13] = '21'", "zeit", "=", "int", "(", "forecast5_data", "[", "'list'", "]", "[", "0", "]", "[", "'dt_txt'", "]", "[", "11", ":", "13", "]", ")", "for", "idx", "in", "range", "(", "8", ",", "0", ",", "-", "1", ")", ":", "if", "3", "*", "idx", "+", "zeit", "==", "24", ":", "heute_24uhr", "=", "idx", "heute_start", "=", "max", "(", "(", "heute_24uhr", "-", "6", ")", ",", "0", ")", "break", "t_list", "=", "[", "]", "# Vorhandene Temp.-Daten bis Mitternacht", "for", "idx", "in", "range", "(", "heute_start", ",", "heute_24uhr", "+", "1", ")", ":", "t_list", ".", "append", "(", "forecast5_data", "[", "'list'", "]", "[", "idx", "]", "[", "'main'", "]", "[", "'temp'", "]", ")", "t_list", ".", "append", "(", "weather_data", "[", "'main'", "]", "[", "'temp'", "]", ")", "# aktuelle Temp. mit einbeziehen", "self", ".", "heute_max", "=", "str", "(", "round", "(", "max", "(", "t_list", ")", ",", "1", ")", ")", "self", ".", "heute_min", "=", "str", "(", "round", "(", "min", "(", "t_list", ")", ",", "1", ")", ")", "t_list", "=", "[", "]", "# Alle Temp.-Daten von morgen ab 6 Uhr", "for", "idx", "in", "range", "(", "heute_24uhr", "+", "2", ",", "heute_24uhr", "+", "9", ")", ":", "t_list", ".", "append", "(", "forecast5_data", "[", "'list'", "]", "[", "idx", "]", "[", "'main'", "]", "[", "'temp'", "]", ")", "self", ".", "morgen_max", "=", "str", "(", "round", "(", "max", "(", "t_list", ")", ",", "1", ")", ")", "self", ".", "morgen_min", "=", "str", "(", "round", "(", "min", "(", "t_list", ")", ",", "1", ")", ")", "# Icon von morgen 12 Uhr", "self", ".", "vorschau", "=", "forecast5_data", "[", "'list'", "]", "[", "heute_24uhr", "+", "4", "]", "[", "'weather'", "]", "[", "0", "]", "[", "'icon'", "]", "except", "(", "error", ".", "URLError", ",", "TypeError", ",", "UnboundLocalError", ")", ":", "print", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%H:%M'", ")", "+", "': No Forecast Data.'", ")" ]
[ 520, 4 ]
[ 589, 88 ]
null
python
de
['de', 'de', 'de']
False
true
null
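The forecast branch of get_data works on 3-hour slots, so the entry that falls on today's midnight is located from the hour of the first forecast entry; today's window then reaches back at most six slots and tomorrow starts two slots after midnight. A hedged sketch of just that index arithmetic (the example hour is made up):

# 3-hour forecast slots: find today's 24:00 entry from the first entry's hour.
first_entry_hour = 21                      # e.g. dt_txt '2018-12-17 21:00:00'
for idx in range(8, 0, -1):
    if 3 * idx + first_entry_hour == 24:
        heute_24uhr = idx                  # index of today's midnight entry
        heute_start = max(heute_24uhr - 6, 0)
        break
print(heute_24uhr, heute_start)            # 1 0 for a 21:00 first entry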
compare
(values, data)
return audio_parts(execute_type_compare(values, data), data)
Vergleicht zwei Werte miteinander und führt je nachdem, ob =, !=, < oder >, die danach aufgeführten `"audio_parts"`-Funktionen aus. Wenn `value_left` gleich `value_right`, führe "transform"-Typen aus on_equal durch. Wenn `value_left` ungleich `value_right`, führe "transform"-Typen aus on_not_equal durch. Wenn `value_left` größer `value_right`, führe "transform"-Typen aus on_higher durch. Wenn `value_left` kleiner `value_right`, führe "transform"-Typen aus on_lower durch. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Vergleicht zwei Werte miteinander und führt je nachdem, ob =, !=, < oder >, die danach aufgeführten `"audio_parts"`-Funktionen aus.
def compare(values, data): """Vergleicht zwei Werte miteinander und führt je nachdem, ob =, !=, < oder >, die danach aufgeführten `"audio_parts"`-Funktionen aus. Wenn `value_left` gleich `value_right`, führe "transform"-Typen aus on_equal durch. Wenn `value_left` ungleich `value_right`, führe "transform"-Typen aus on_not_equal durch. Wenn `value_left` größer `value_right`, führe "transform"-Typen aus on_higher durch. Wenn `value_left` kleiner `value_right`, führe "transform"-Typen aus on_lower durch. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ return audio_parts(execute_type_compare(values, data), data)
[ "def", "compare", "(", "values", ",", "data", ")", ":", "return", "audio_parts", "(", "execute_type_compare", "(", "values", ",", "data", ")", ",", "data", ")" ]
[ 58, 0 ]
[ 70, 64 ]
null
python
de
['de', 'de', 'de']
True
true
null
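compare itself only dispatches; which audio_parts branch runs depends on the relation between value_left and value_right. A simplified illustration of that decision (the key names follow the docstring; the helper is hypothetical and leaves out the separate on_not_equal case):

# Hypothetical branch selection in the spirit of the compare docstring.
def pick_branch(value_left, value_right):
    if value_left == value_right:
        return "on_equal"
    return "on_higher" if value_left > value_right else "on_lower"

print(pick_branch(3, 3))   # on_equal
print(pick_branch(5, 2))   # on_higher
print(pick_branch(1, 2))   # on_lower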
_bezierKurvenEinheitskreis
()
return [[[1,0],[1,c],[c,1],[0,1]], [[0,1],[-c,1],[-1,c],[-1,0]], [[-1,0],[-1,-c],[-c,-1],[0,-1]], [[0,-1],[c,-1],[1,-c],[1,0]]]
Einheitskreises als Liste von Bezierkurven https://stackoverflow.com/questions/1734745/how-to-create-circle-with-bézier-curves https://spencermortensen.com/articles/bezier-circle/ https://stackoverflow.com/questions/734076/how-to-best-approximate-a-geometrical-arc-with-a-bezier-curve
Einheitskreises als Liste von Bezierkurven https://stackoverflow.com/questions/1734745/how-to-create-circle-with-bézier-curves https://spencermortensen.com/articles/bezier-circle/ https://stackoverflow.com/questions/734076/how-to-best-approximate-a-geometrical-arc-with-a-bezier-curve
def _bezierKurvenEinheitskreis(): '''Einheitskreises als Liste von Bezierkurven https://stackoverflow.com/questions/1734745/how-to-create-circle-with-bézier-curves https://spencermortensen.com/articles/bezier-circle/ https://stackoverflow.com/questions/734076/how-to-best-approximate-a-geometrical-arc-with-a-bezier-curve''' c=0.552 #0.5522 or 0.5522847 or 0.551915 return [[[1,0],[1,c],[c,1],[0,1]], [[0,1],[-c,1],[-1,c],[-1,0]], [[-1,0],[-1,-c],[-c,-1],[0,-1]], [[0,-1],[c,-1],[1,-c],[1,0]]]
[ "def", "_bezierKurvenEinheitskreis", "(", ")", ":", "c", "=", "0.552", "#0.5522 or 0.5522847 or 0.551915", "return", "[", "[", "[", "1", ",", "0", "]", ",", "[", "1", ",", "c", "]", ",", "[", "c", ",", "1", "]", ",", "[", "0", ",", "1", "]", "]", ",", "[", "[", "0", ",", "1", "]", ",", "[", "-", "c", ",", "1", "]", ",", "[", "-", "1", ",", "c", "]", ",", "[", "-", "1", ",", "0", "]", "]", ",", "[", "[", "-", "1", ",", "0", "]", ",", "[", "-", "1", ",", "-", "c", "]", ",", "[", "-", "c", ",", "-", "1", "]", ",", "[", "0", ",", "-", "1", "]", "]", ",", "[", "[", "0", ",", "-", "1", "]", ",", "[", "c", ",", "-", "1", "]", ",", "[", "1", ",", "-", "c", "]", ",", "[", "1", ",", "0", "]", "]", "]" ]
[ 44, 0 ]
[ 53, 41 ]
null
python
de
['de', 'de', 'de']
True
true
null
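The constant c ≈ 0.552 makes each cubic Bézier segment hug the unit circle. A quick numerical check: evaluating the first segment at t = 0.5 should land almost exactly at radius 1.

# Evaluate the first Bezier segment [(1,0),(1,c),(c,1),(0,1)] at t = 0.5.
c = 0.552
p0, p1, p2, p3 = (1, 0), (1, c), (c, 1), (0, 1)

def bezier(t, a, b, d, e):
    x = (1-t)**3*a[0] + 3*(1-t)**2*t*b[0] + 3*(1-t)*t**2*d[0] + t**3*e[0]
    y = (1-t)**3*a[1] + 3*(1-t)**2*t*b[1] + 3*(1-t)*t**2*d[1] + t**3*e[1]
    return x, y

x, y = bezier(0.5, p0, p1, p2, p3)
print(x, y, (x*x + y*y) ** 0.5)   # ~0.707, ~0.707, radius ~0.9998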
ispDicom.getInfo
( self )
return obj
Wie print( self.ae ) gibt aber ein object zurück Returns ------- obj : dict dict mit Server Informationen.
Wie print( self.ae ) gibt aber ein object zurück
def getInfo( self ): """Wie print( self.ae ) gibt aber ein object zurück Returns ------- obj : dict dict mit Server Informationen. """ obj = { "dicomPath": self.dicomPath } if self.ae: obj["title"] = self.ae.ae_title obj["active_associations"] = len(self.ae.active_associations) obj["maximum_associations"] = self.ae.maximum_associations obj["acse_timeout"] = self.ae.acse_timeout obj["dimse_timeout"] = self.ae.dimse_timeout obj["network_timeout"] = self.ae.network_timeout obj["associations"] = [] for assoc in self.ae.active_associations: associations = { "ae_title" : assoc.remote['ae_title'], "address" : assoc.remote['address'], "port" : assoc.remote['port'], "accepted_contexts" : [] } for cx in assoc.accepted_contexts: #print( "cx", cx ) associations["accepted_contexts"].append( { "Context" : cx.abstract_syntax, "SCP_role" : cx.as_scp, "SCU_role" : cx.as_scu }) obj["associations"].append( associations ) return obj
[ "def", "getInfo", "(", "self", ")", ":", "obj", "=", "{", "\"dicomPath\"", ":", "self", ".", "dicomPath", "}", "if", "self", ".", "ae", ":", "obj", "[", "\"title\"", "]", "=", "self", ".", "ae", ".", "ae_title", "obj", "[", "\"active_associations\"", "]", "=", "len", "(", "self", ".", "ae", ".", "active_associations", ")", "obj", "[", "\"maximum_associations\"", "]", "=", "self", ".", "ae", ".", "maximum_associations", "obj", "[", "\"acse_timeout\"", "]", "=", "self", ".", "ae", ".", "acse_timeout", "obj", "[", "\"dimse_timeout\"", "]", "=", "self", ".", "ae", ".", "dimse_timeout", "obj", "[", "\"network_timeout\"", "]", "=", "self", ".", "ae", ".", "network_timeout", "obj", "[", "\"associations\"", "]", "=", "[", "]", "for", "assoc", "in", "self", ".", "ae", ".", "active_associations", ":", "associations", "=", "{", "\"ae_title\"", ":", "assoc", ".", "remote", "[", "'ae_title'", "]", ",", "\"address\"", ":", "assoc", ".", "remote", "[", "'address'", "]", ",", "\"port\"", ":", "assoc", ".", "remote", "[", "'port'", "]", ",", "\"accepted_contexts\"", ":", "[", "]", "}", "for", "cx", "in", "assoc", ".", "accepted_contexts", ":", "#print( \"cx\", cx )", "associations", "[", "\"accepted_contexts\"", "]", ".", "append", "(", "{", "\"Context\"", ":", "cx", ".", "abstract_syntax", ",", "\"SCP_role\"", ":", "cx", ".", "as_scp", ",", "\"SCU_role\"", ":", "cx", ".", "as_scu", "}", ")", "obj", "[", "\"associations\"", "]", ".", "append", "(", "associations", ")", "return", "obj" ]
[ 459, 4 ]
[ 496, 18 ]
null
python
de
['de', 'de', 'de']
True
true
null
select_range
(values: dict, data: StepData)
Entfernt alle Werte aus `"array_key"`, die nicht in `"range"` sind. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Entfernt alle Werte aus `"array_key"`, die nicht in `"range"` sind.
def select_range(values: dict, data: StepData): """Entfernt alle Werte aus `"array_key"`, die nicht in `"range"` sind. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ value_array = data.get_data(values["array_key"], values) range_start = data.get_data(values.get("range_start", 0), values, int) range_end = data.get_data(values["range_end"], values, int) data.insert_data(values["array_key"], value_array[range_start:range_end], values)
[ "def", "select_range", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "value_array", "=", "data", ".", "get_data", "(", "values", "[", "\"array_key\"", "]", ",", "values", ")", "range_start", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"range_start\"", ",", "0", ")", ",", "values", ",", "int", ")", "range_end", "=", "data", ".", "get_data", "(", "values", "[", "\"range_end\"", "]", ",", "values", ",", "int", ")", "data", ".", "insert_data", "(", "values", "[", "\"array_key\"", "]", ",", "value_array", "[", "range_start", ":", "range_end", "]", ",", "values", ")" ]
[ 127, 0 ]
[ 137, 85 ]
null
python
de
['de', 'de', 'de']
True
true
null
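select_range reduces to a plain list slice once StepData has resolved the keys; range_end is exclusive. A plain-Python illustration with made-up values:

# Slice semantics of select_range: start inclusive, end exclusive.
value_array = [10, 20, 30, 40, 50]
range_start, range_end = 1, 4
print(value_array[range_start:range_end])   # [20, 30, 40]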
Line.intersect_line
(self, line, rtol=1e-03, atol=1e-03, include_end=False, all=False)
return []
Von zwei Line-Objekten wird der Schnittpunkt bestimmt und in einer Liste ausgegeben.
Von zwei Line-Objekten wird der Schnittpunkt bestimmt und in einer Liste ausgegeben.
def intersect_line(self, line, rtol=1e-03, atol=1e-03, include_end=False, all=False): """ Von zwei Line-Objekten wird der Schnittpunkt bestimmt und in einer Liste ausgegeben. """ point = [] m_L1 = self.m() m_L2 = line.m() if m_L1 is None: if m_L2 is None: return [] else: y = line_n([line.p1[0]-self.p1[0], line.p1[1]], m_L2) point = (self.p1[0], y) else: if m_L2 is None: y = line_n([self.p1[0]-line.p1[0], self.p1[1]], m_L1) point = (line.p1[0], y) else: if np.isclose(m_L1, m_L2): return [] else: point = lines_intersect_point(self.p1, m_L1, self.n(m_L1), line.p1, m_L2, line.n(m_L2)) if all: return[point] if line.is_point_inside(point, rtol, atol, include_end): if self.is_point_inside(point, rtol, atol, include_end): return [point] return []
[ "def", "intersect_line", "(", "self", ",", "line", ",", "rtol", "=", "1e-03", ",", "atol", "=", "1e-03", ",", "include_end", "=", "False", ",", "all", "=", "False", ")", ":", "point", "=", "[", "]", "m_L1", "=", "self", ".", "m", "(", ")", "m_L2", "=", "line", ".", "m", "(", ")", "if", "m_L1", "is", "None", ":", "if", "m_L2", "is", "None", ":", "return", "[", "]", "else", ":", "y", "=", "line_n", "(", "[", "line", ".", "p1", "[", "0", "]", "-", "self", ".", "p1", "[", "0", "]", ",", "line", ".", "p1", "[", "1", "]", "]", ",", "m_L2", ")", "point", "=", "(", "self", ".", "p1", "[", "0", "]", ",", "y", ")", "else", ":", "if", "m_L2", "is", "None", ":", "y", "=", "line_n", "(", "[", "self", ".", "p1", "[", "0", "]", "-", "line", ".", "p1", "[", "0", "]", ",", "self", ".", "p1", "[", "1", "]", "]", ",", "m_L1", ")", "point", "=", "(", "line", ".", "p1", "[", "0", "]", ",", "y", ")", "else", ":", "if", "np", ".", "isclose", "(", "m_L1", ",", "m_L2", ")", ":", "return", "[", "]", "else", ":", "point", "=", "lines_intersect_point", "(", "self", ".", "p1", ",", "m_L1", ",", "self", ".", "n", "(", "m_L1", ")", ",", "line", ".", "p1", ",", "m_L2", ",", "line", ".", "n", "(", "m_L2", ")", ")", "if", "all", ":", "return", "[", "point", "]", "if", "line", ".", "is_point_inside", "(", "point", ",", "rtol", ",", "atol", ",", "include_end", ")", ":", "if", "self", ".", "is_point_inside", "(", "point", ",", "rtol", ",", "atol", ",", "include_end", ")", ":", "return", "[", "point", "]", "return", "[", "]" ]
[ 1032, 4 ]
[ 1063, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
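A hedged usage sketch. It assumes a Line can be constructed from two endpoints, which fits the p1/m()/n() accessors used above but is not confirmed by this record.

# Hypothetical usage; the Line constructor signature is an assumption.
l1 = Line((0.0, 0.0), (2.0, 2.0))                       # slope +1
l2 = Line((0.0, 2.0), (2.0, 0.0))                       # slope -1
print(l1.intersect_line(l2))                            # expected: [(1.0, 1.0)]
print(l1.intersect_line(Line((0.0, 1.0), (2.0, 3.0))))  # parallel -> []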
Scheduler.start_unblocking
(self)
Starts the scheduler in a new thread. Checks every minute whether any jobs have to be executed. If so, they are run in another thread.
Starts the scheduler in a new thread.
def start_unblocking(self):
    """Starts the scheduler in a new thread.

    Checks every minute whether any jobs have to be executed.
    If so, they are run in another thread.
    """
    threading.Thread(target=self.start, daemon=True).start()
[ "def", "start_unblocking", "(", "self", ")", ":", "threading", ".", "Thread", "(", "target", "=", "self", ".", "start", ",", "daemon", "=", "True", ")", ".", "start", "(", ")" ]
[ 131, 4 ]
[ 137, 64 ]
null
python
de
['de', 'de', 'de']
True
true
null
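A minimal sketch of the pattern used here: a blocking start() loop pushed onto a daemon thread. Scheduler's own start() implementation is not part of this record, so the loop body below is an assumption.

import threading
import time

class MiniScheduler:
    def start(self):
        # Blocking loop; the real Scheduler presumably checks for due jobs once per minute.
        while True:
            print("checking for due jobs ...")
            time.sleep(60)

    def start_unblocking(self):
        # daemon=True: the thread does not keep the interpreter alive on shutdown.
        threading.Thread(target=self.start, daemon=True).start()

MiniScheduler().start_unblocking()
time.sleep(2)  # keep the main thread alive briefly so the first check is visible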
get_memory_path
(path: str, name: str, job_name: str)
return get_resource_path(os.path.join(MEMORY_LOCATION, job_name, name, path))
Creates an absolute path to the given resource in the memory folder. :param path: path to the resource, relative to the `resources/memory` folder. :param name: name of the subfolder below the job's memory folder. :param job_name: name of the job from which the function was called.
Creates an absolute path to the given resource in the memory folder.
def get_memory_path(path: str, name: str, job_name: str):
    """Creates an absolute path to the given resource in the memory folder.

    :param path: path to the resource, relative to the `resources/memory` folder.
    :param name: name of the subfolder below the job's memory folder.
    :param job_name: name of the job from which the function was called.
    """
    return get_resource_path(os.path.join(MEMORY_LOCATION, job_name, name, path))
[ "def", "get_memory_path", "(", "path", ":", "str", ",", "name", ":", "str", ",", "job_name", ":", "str", ")", ":", "return", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "MEMORY_LOCATION", ",", "job_name", ",", "name", ",", "path", ")", ")" ]
[ 178, 0 ]
[ 184, 81 ]
null
python
de
['de', 'de', 'de']
True
true
null
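A hedged sketch of the path composition; get_resource_path and MEMORY_LOCATION are replaced by assumed stand-ins because their real definitions are not part of this record.

import os

MEMORY_LOCATION = "memory"                                    # assumed value
def get_resource_path(rel_path):                              # assumed stand-in
    return os.path.abspath(os.path.join("resources", rel_path))

print(get_memory_path("2021-01-01.json", "weather_cache", "daily_report"))
# e.g. .../resources/memory/daily_report/weather_cache/2021-01-01.json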
get_relative_temp_resource_path
(path: str, pipeline_id: str)
return os.path.join(RESOURCES_LOCATION, TEMP_LOCATION, pipeline_id, path)
Creates a relative path to the given resource in the temp folder. :param path: path to the resource, relative to the `resources/temp` folder. :param pipeline_id: id of the pipeline from which the function was called. :type pipeline_id: str
Creates a relative path to the given resource in the temp folder.
def get_relative_temp_resource_path(path: str, pipeline_id: str):
    """Creates a relative path to the given resource in the temp folder.

    :param path: path to the resource, relative to the `resources/temp` folder.
    :param pipeline_id: id of the pipeline from which the function was called.
    :type pipeline_id: str
    """
    return os.path.join(RESOURCES_LOCATION, TEMP_LOCATION, pipeline_id, path)
[ "def", "get_relative_temp_resource_path", "(", "path", ":", "str", ",", "pipeline_id", ":", "str", ")", ":", "return", "os", ".", "path", ".", "join", "(", "RESOURCES_LOCATION", ",", "TEMP_LOCATION", ",", "pipeline_id", ",", "path", ")" ]
[ 168, 0 ]
[ 175, 77 ]
null
python
de
['de', 'de', 'de']
True
true
null
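The result is simply the join of the two location constants, the pipeline id and the resource path. A hedged sketch with RESOURCES_LOCATION and TEMP_LOCATION set to assumed values:

import os

RESOURCES_LOCATION = "resources"   # assumed value
TEMP_LOCATION = "temp"             # assumed value

print(get_relative_temp_resource_path("frame_001.png", "pipeline_42"))
# resources/temp/pipeline_42/frame_001.png (separators depend on the OS)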
register_sequence
(func)
return register_type_func(SEQUENCE_TYPES, SequenceError, func)
Registers the given function and wraps it in a `"try/except"` block. Adds a type function to the SEQUENCE_TYPES dictionary. :param func: the function to register :return: function with try/except block
Registers the given function and wraps it in a `"try/except"` block. Adds a type function to the SEQUENCE_TYPES dictionary.
def register_sequence(func):
    """Registers the given function and wraps it in a `"try/except"` block.

    Adds a type function to the SEQUENCE_TYPES dictionary.

    :param func: the function to register
    :return: function with try/except block
    """
    return register_type_func(SEQUENCE_TYPES, SequenceError, func)
[ "def", "register_sequence", "(", "func", ")", ":", "return", "register_type_func", "(", "SEQUENCE_TYPES", ",", "SequenceError", ",", "func", ")" ]
[ 22, 0 ]
[ 29, 66 ]
null
python
de
['de', 'de', 'de']
True
true
null
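register_sequence looks like a decorator-style registration hook. A hedged sketch with register_type_func, SEQUENCE_TYPES and SequenceError replaced by assumed minimal stand-ins (their real implementations are not part of this record):

SEQUENCE_TYPES = {}                          # assumed registry
class SequenceError(Exception):              # assumed error type
    pass

def register_type_func(registry, error_cls, func):   # assumed stand-in
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            # re-raise as the registry-specific error type
            raise error_cls(str(exc)) from exc
    registry[func.__name__] = wrapper
    return wrapper

@register_sequence
def successive(values, data):
    return None

print("successive" in SEQUENCE_TYPES)   # True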
run
( overlay:dict={} )
return webApp
Starts ispBaseWebApp with additional config settings Parameters ---------- overlay : dict, optional Overlay settings for the config. The default is {}. Returns ------- webApp : ispBaseWebApp The started web application
Starts ispBaseWebApp with additional config settings
def run( overlay:dict={} ):
    '''
    Starts ispBaseWebApp with additional config settings

    Parameters
    ----------
    overlay : dict, optional
        Overlay settings for the config. The default is {}.

    Returns
    -------
    webApp : ispBaseWebApp
        The started web application

    '''
    # open the configuration
    _config = ispConfig( mqttlevel=logging.WARNING )

    _apiConfig = {
        "models": [ gqa, gqadb, system ],
    }

    # start the webserver
    webApp = ispBaseWebApp( _config, db, apiconfig=_apiConfig, overlay=overlay )
    # close mqtt in the config
    _config.mqttCleanup( )
    return webApp
[ "def", "run", "(", "overlay", ":", "dict", "=", "{", "}", ")", ":", "# Konfiguration öffnen", "_config", "=", "ispConfig", "(", "mqttlevel", "=", "logging", ".", "WARNING", ")", "_apiConfig", "=", "{", "\"models\"", ":", "[", "gqa", ",", "gqadb", ",", "system", "]", ",", "}", "# Webserver starten", "webApp", "=", "ispBaseWebApp", "(", "_config", ",", "db", ",", "apiconfig", "=", "_apiConfig", ",", "overlay", "=", "overlay", ")", "# mqtt in config schließen", "_config", ".", "mqttCleanup", "(", ")", "return", "webApp" ]
[ 245, 0 ]
[ 272, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
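A hedged call-site sketch: run() would presumably be invoked from a startup script, with webserver overrides passed through the overlay dict. The overlay keys shown below are illustrative assumptions, not documented options of ispConfig/ispBaseWebApp.

# Hypothetical call site; overlay keys are illustrative only.
if __name__ == "__main__":
    webApp = run( overlay={
        "server": { "webserver": { "host": "127.0.0.1", "port": 8085 } }
    } )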
show_script_infos
()
Shows some information about the script
Shows some information about the script
def show_script_infos():
    """Shows some information about the script."""
    print("AppName: {}".format(APPNAME))
    print("Script: {}".format(SCRIPT))
    print("Script-Pfad: {}".format(SCRIPT_PATH))
    print("Working Dir: {}".format(WORKING_DIR))
    print("Properties File: {}".format(PROPS_FILE))
[ "def", "show_script_infos", "(", ")", ":", "print", "(", "\"AppName: {}\"", ".", "format", "(", "APPNAME", ")", ")", "print", "(", "\"Script: {}\"", ".", "format", "(", "SCRIPT", ")", ")", "print", "(", "\"Script-Pfad: {}\"", ".", "format", "(", "SCRIPT_PATH", ")", ")", "print", "(", "\"Working Dir: {}\"", ".", "format", "(", "WORKING_DIR", ")", ")", "print", "(", "\"Properties File: {}\"", ".", "format", "(", "PROPS_FILE", ")", ")" ]
[ 43, 0 ]
[ 50, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
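A hedged sketch of the module-level constants show_script_infos relies on; their real values come from the surrounding script, and the definitions below are assumptions for illustration only.

import os
import sys

APPNAME = "example-app"                                       # assumed
SCRIPT = os.path.basename(sys.argv[0])                        # assumed
SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))   # assumed
WORKING_DIR = os.getcwd()                                     # assumed
PROPS_FILE = os.path.join(WORKING_DIR, "app.properties")      # assumed

show_script_infos()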